diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e43b0f9 --- /dev/null +++ b/.gitignore @@ -0,0 +1 @@ +.DS_Store diff --git a/Assets/GuruKCP/Editor/__PLACEHOLDER__ b/Assets/GuruKCP/Editor/__PLACEHOLDER__ deleted file mode 100644 index e69de29..0000000 diff --git a/Assets/GuruKCP/Editor/__PLACEHOLDER__.meta b/Assets/GuruKCP/Editor/__PLACEHOLDER__.meta deleted file mode 100644 index b5b54aa..0000000 --- a/Assets/GuruKCP/Editor/__PLACEHOLDER__.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: 68f75ad8b6624e2ca86deea09d90452e -timeCreated: 1693367821 \ No newline at end of file diff --git a/Assets/GuruKCP/Runtime/__PLACEHOLDER__ b/Assets/GuruKCP/Runtime/__PLACEHOLDER__ deleted file mode 100644 index e69de29..0000000 diff --git a/Assets/GuruKCP/Runtime/__PLACEHOLDER__.meta b/Assets/GuruKCP/Runtime/__PLACEHOLDER__.meta deleted file mode 100644 index f77dba7..0000000 --- a/Assets/GuruKCP/Runtime/__PLACEHOLDER__.meta +++ /dev/null @@ -1,3 +0,0 @@ -fileFormatVersion: 2 -guid: 0cce6bb95d7a4c62a10114242afa5176 -timeCreated: 1693367793 \ No newline at end of file diff --git a/Assets/GuruKCP/Editor.meta b/Editor.meta similarity index 100% rename from Assets/GuruKCP/Editor.meta rename to Editor.meta diff --git a/Editor/GuruKCP.Editor.asmdef b/Editor/GuruKCP.Editor.asmdef new file mode 100644 index 0000000..33d0b06 --- /dev/null +++ b/Editor/GuruKCP.Editor.asmdef @@ -0,0 +1,18 @@ +{ + "name": "Protobuf-net.Editor", + "rootNamespace": "", + "references": [ + "GUID:832e7ae06a4304a17a11ca2f7b21373d" + ], + "includePlatforms": [ + "Editor" + ], + "excludePlatforms": [], + "allowUnsafeCode": true, + "overrideReferences": false, + "precompiledReferences": [], + "autoReferenced": true, + "defineConstraints": [], + "versionDefines": [], + "noEngineReferences": false +} \ No newline at end of file diff --git a/Editor/GuruKCP.Editor.asmdef.meta b/Editor/GuruKCP.Editor.asmdef.meta new file mode 100644 index 0000000..82dbfb5 --- /dev/null +++ b/Editor/GuruKCP.Editor.asmdef.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: b8a90c429cd0f45168d25ef7957cc732 +AssemblyDefinitionImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/Proto2CSEditor.cs b/Editor/Proto2CSEditor.cs new file mode 100644 index 0000000..81b9867 --- /dev/null +++ b/Editor/Proto2CSEditor.cs @@ -0,0 +1,248 @@ +// +// Proto2CSEditor.cs +// +// Author: +// JasonXuDeveloper(傑) +// +// Copyright (c) 2020 JEngine +// +// Permission is hereby granted, free of charge, to any person obtaining a copy +// of this software and associated documentation files (the "Software"), to deal +// in the Software without restriction, including without limitation the rights +// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +// copies of the Software, and to permit persons to whom the Software is +// furnished to do so, subject to the following conditions: +// +// The above copyright notice and this permission notice shall be included in +// all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+using System.IO;
+using Google.Protobuf.Reflection;
+using ProtoBuf.Reflection;
+using UnityEditor;
+using UnityEngine;
+
+namespace Guru
+{
+    internal class Proto2CSEditor : EditorWindow
+    {
+        private static Proto2CSEditor win;
+
+        //[MenuItem("Tools/Protobuf/Show Generate Window")]
+        //public static void ShowCSGenerateWindow()
+        //{
+        //    int index = Application.dataPath.LastIndexOf('/');
+        //    var proto_dir = $"{Application.dataPath.Substring(0, index)}/ServerProtos";
+        //    win = GetWindow<Proto2CSEditor>("Proto2CS Generator");
+        //    win.folder = EditorUtility.OpenFolderPanel("Please select proto files directory",
+        //        proto_dir, "");
+        //    win.minSize = new Vector2(500, 500);
+        //    win.Show();
+        //}
+
+        [MenuItem("Tools/Protobuf/Generate All")]
+        public static void GenerateAllProtos()
+        {
+            int index = Application.dataPath.LastIndexOf('/');
+            var proto_dir = $"{Application.dataPath.Substring(0, index)}/ServerProtos";
+            var file_list = GetAllProtoFiles(proto_dir);
+            var dest_folder = $"{Application.dataPath}/../Assets/Scripts/NetworkGen";
+
+            if (Directory.Exists(dest_folder))
+            {
+                Directory.Delete(dest_folder, true);
+
+                // Just in case the system still has dest_folder locked for deletion
+                while (Directory.Exists(dest_folder))
+                {
+                    System.Threading.Thread.Sleep(10);
+                }
+            }
+
+            Directory.CreateDirectory(dest_folder);
+
+            Generate(proto_dir, file_list, dest_folder);
+        }
+
+        [MenuItem("Tools/Protobuf/View Proto Files")]
+        private static void ViewDataPath()
+        {
+            int index = Application.dataPath.LastIndexOf('/');
+            var proto_dir = $"{Application.dataPath.Substring(0, index)}/ServerProtos";
+
+            if (!Directory.Exists(proto_dir))
+            {
+                Directory.CreateDirectory(proto_dir);
+            }
+
+            EditorUtility.OpenWithDefaultApp(proto_dir);
+        }
+
+        [SerializeField] protected string[] _fileList = new string[0];
+        protected string folder;
+        protected SerializedObject _serializedObject;
+        protected SerializedProperty _fileListProperty;
+
+
+        protected void OnEnable()
+        {
+            // Initialize the SerializedObject from this window instance
+            _serializedObject = new SerializedObject(this);
+            // Find the serializable file-list property on this class
+            _fileListProperty = _serializedObject.FindProperty("_fileList");
+        }
+
+        protected void OnGUI()
+        {
+            // Draw the title
+            GUILayout.Space(10);
+            GUIStyle textStyle = new GUIStyle();
+            textStyle.fontSize = 24;
+            textStyle.normal.textColor = Color.white;
+            textStyle.alignment = TextAnchor.MiddleCenter;
+            GUILayout.Label("Proto文件转CS文件", textStyle);
+            textStyle.fontSize = 18;
+            GUILayout.Label("Proto file to CS file", textStyle);
+            GUILayout.Space(10);
+
+            /*
+             * Path
+             */
+            GUILayout.Label("Proto file folder Proto文件路径");
+            GUILayout.BeginHorizontal();
+            EditorGUI.BeginDisabledGroup(true);
+            folder = EditorGUILayout.TextField(folder);
+            EditorGUI.EndDisabledGroup();
+
+            GUILayout.Space(10);
+            if (GUILayout.Button("Select Path 选择路径", GUILayout.ExpandWidth(false)))
+            {
+                int index = Application.dataPath.LastIndexOf('/');
+                var proto_dir = $"{Application.dataPath.Substring(0, index)}/ServerProtos";
+
+                folder = EditorUtility.OpenFolderPanel("Select proto files source 请选择proto文件路径", proto_dir, "");
+            }
+
+            GUILayout.EndHorizontal();
+
+            /*
+             * Files
+             */
+            GUILayout.Space(10);
+            GUILayout.Label("Files to convert 需转换文件");
+            // Update the serialized object
+            _serializedObject.Update();
+            // Start checking for changes
+            EditorGUI.BeginChangeCheck();
+            // Display the property
+            EditorGUILayout.PropertyField(_fileListProperty, true);
+
+            // Finish checking for changes
+            if (EditorGUI.EndChangeCheck())
+            {
+                // Apply the changes
+                _serializedObject.ApplyModifiedProperties();
+            }
+
+            /*
+             * Buttons
+             */
+            GUILayout.Space(50);
+            if (GUILayout.Button("Match all files from folder 从文件夹中匹配全部文件"))
+            {
+                _fileList = GetAllProtoFiles(folder);
+                _serializedObject.Update();
+            }
+
+            GUILayout.Space(10);
+            if (GUILayout.Button("Generate 生成"))
+            {
+                var dest_folder = $"{Application.dataPath}/Gen/Network";
+                //Generate(folder, _fileList, dest_folder);
+            }
+        }
+
+        private static string[] GetAllProtoFiles(string path)
+        {
+            if (string.IsNullOrEmpty(path))
+            {
+                Debug.LogError("Folder path is empty!");
+                return null;
+            }
+
+            var file_list = Directory.GetFiles(path, "*.proto", SearchOption.AllDirectories);
+            var file_name_list = new string[file_list.Length];
+
+            for (int i = 0; i < file_list.Length; i++)
+            {
+                file_name_list[i] = Path.GetFileName(file_list[i]);
+            }
+
+            return file_name_list;
+        }
+
+        private static void Generate(string inpath, string[] inprotos, string outpath)
+        {
+            if (!Directory.Exists(outpath))
+            {
+                Directory.CreateDirectory(outpath);
+            }
+
+            var set = new FileDescriptorSet();
+            set.AddImportPath(inpath);
+            foreach (var inproto in inprotos)
+            {
+                var s = inproto;
+                if (!inproto.Contains(".proto"))
+                {
+                    s += ".proto";
+                }
+
+                set.Add(s, true);
+            }
+
+            set.Process();
+            var errors = set.GetErrors();
+            CSharpCodeGenerator.ClearTypeNames();
+            var files = CSharpCodeGenerator.Default.Generate(set);
+
+            foreach (var file in files)
+            {
+                CSharpCodeGenerator.ClearTypeNames();
+                var full_file_name = file.Name;
+                int index = full_file_name.LastIndexOf('.');
+                var file_name = index > 0 ? full_file_name.Substring(0, index) : full_file_name;
+                file_name = file_name.ToLower();
+                var dest_filename = $"{NameNormalizer.AutoCapitalize(file_name)}.cs";
+                //var path = Path.Combine(outpath, file.Name);
+                var path = Path.Combine(outpath, dest_filename);
+                File.WriteAllText(path, file.Text);
+
+                Debug.Log($"Generated cs file for {full_file_name.Replace(".cs", ".proto")} successfully to: {path}");
+            }
+
+            EditorUtility.DisplayDialog("Complete",
+                "Proto文件已转CS,详细请看控制台输出" +
+                "\n" +
+                "Proto files have been converted into CS files, please go to the console to view details",
+                "Close window");
+
+            if (win != null)
+            {
+                win.Close();
+            }
+
+            AssetDatabase.Refresh();
+        }
+
+    }
+
+}
\ No newline at end of file
diff --git a/Editor/Proto2CSEditor.cs.meta b/Editor/Proto2CSEditor.cs.meta
new file mode 100644
index 0000000..9a2b4b7
--- /dev/null
+++ b/Editor/Proto2CSEditor.cs.meta
@@ -0,0 +1,11 @@
+fileFormatVersion: 2
+guid: 0995aad32e8324ae48ae9e676c5bf38c
+MonoImporter:
+  externalObjects: {}
+  serializedVersion: 2
+  defaultReferences: []
+  executionOrder: 0
+  icon: {instanceID: 0}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/Editor/protobuf-net.Reflection.meta b/Editor/protobuf-net.Reflection.meta
new file mode 100644
index 0000000..cd4629d
--- /dev/null
+++ b/Editor/protobuf-net.Reflection.meta
@@ -0,0 +1,8 @@
+fileFormatVersion: 2
+guid: 6def9201877ce4d4797b547c57cdeee0
+folderAsset: yes
+DefaultImporter:
+  externalObjects: {}
+  userData: 
+  assetBundleName: 
+  assetBundleVariant: 
diff --git a/Editor/protobuf-net.Reflection/CSharpCodeGenerator.cs b/Editor/protobuf-net.Reflection/CSharpCodeGenerator.cs
new file mode 100644
index 0000000..9a91dc4
--- /dev/null
+++ b/Editor/protobuf-net.Reflection/CSharpCodeGenerator.cs
@@ -0,0 +1,787 @@
+using Google.Protobuf.Reflection;
+using System;
+using 
System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text; + +namespace ProtoBuf.Reflection +{ + /// + /// A coded generator that writes C# + /// + public class CSharpCodeGenerator : CommonCodeGenerator + { + /// + /// Reusable code-generator instance + /// + public static CSharpCodeGenerator Default { get; } = new CSharpCodeGenerator(); + /// + /// Create a new CSharpCodeGenerator instance + /// + protected CSharpCodeGenerator() { } + /// + /// Returns the language name + /// + public override string Name => "C#"; + /// + /// Returns the default file extension + /// + protected override string DefaultFileExtension => "cs"; + /// + /// Escapes language keywords + /// + protected override string Escape(string identifier) + { + switch (identifier) + { + case "abstract": + case "event": + case "new": + case "struct": + case "as": + case "explicit": + case "null": + case "switch": + case "base": + case "extern": + case "object": + case "this": + case "bool": + case "false": + case "operator": + case "throw": + case "break": + case "finally": + case "out": + case "true": + case "byte": + case "fixed": + case "override": + case "try": + case "case": + case "float": + case "params": + case "typeof": + case "catch": + case "for": + case "private": + case "uint": + case "char": + case "foreach": + case "protected": + case "ulong": + case "checked": + case "goto": + case "public": + case "unchecked": + case "class": + case "if": + case "readonly": + case "unsafe": + case "const": + case "implicit": + case "ref": + case "ushort": + case "continue": + case "in": + case "return": + case "using": + case "decimal": + case "int": + case "sbyte": + case "virtual": + case "default": + case "interface": + case "sealed": + case "volatile": + case "delegate": + case "internal": + case "short": + case "void": + case "do": + case "is": + case "sizeof": + case "while": + case "double": + case "lock": + case "stackalloc": + case "else": + case "long": + case "static": + case "enum": + case "namespace": + case "string": + return "@" + identifier; + default: + return identifier; + } + } + /// + /// Start a file + /// + protected override void WriteFileHeader(GeneratorContext ctx, FileDescriptorProto file, ref object state) + { + ctx.WriteLine("// This file was generated by a tool; you should avoid making direct changes.") + .WriteLine("// Consider using 'partial classes' to extend these types") + .WriteLine($"// Input: {Path.GetFileName(ctx.File.Name)}").WriteLine() + .WriteLine("#pragma warning disable CS1591, CS0612, CS3021").WriteLine(); + + + var @namespace = ctx.NameNormalizer.GetName(file); + + if (!string.IsNullOrWhiteSpace(@namespace)) + { + state = @namespace; + ctx.WriteLine($"namespace {@namespace}"); + ctx.WriteLine("{").Indent().WriteLine(); + } + + } + /// + /// End a file + /// + protected override void WriteFileFooter(GeneratorContext ctx, FileDescriptorProto file, ref object state) + { + var @namespace = (string)state; + if (!string.IsNullOrWhiteSpace(@namespace)) + { + ctx.Outdent().WriteLine("}").WriteLine(); + } + + ctx.WriteLine("#pragma warning restore CS1591, CS0612, CS3021"); + } + /// + /// Start an enum + /// + protected override void WriteEnumHeader(GeneratorContext ctx, EnumDescriptorProto obj, ref object state) + { + var name = ctx.NameNormalizer.GetName(obj); + var tw = ctx.Write($@"[global::ProtoBuf.ProtoContract("); + if (name != obj.Name) tw.Write($@"Name = @""{obj.Name}"""); + tw.WriteLine(")]"); + WriteOptions(ctx, obj.Options); + 
ctx.WriteLine($"{GetAccess(GetAccess(obj))} enum {Escape(name)}").WriteLine("{").Indent(); + } + /// + /// End an enum + /// + + protected override void WriteEnumFooter(GeneratorContext ctx, EnumDescriptorProto obj, ref object state) + { + ctx.Outdent().WriteLine("}").WriteLine(); + } + /// + /// Write an enum value + /// + protected override void WriteEnumValue(GeneratorContext ctx, EnumValueDescriptorProto obj, ref object state) + { + var name = ctx.NameNormalizer.GetName(obj); + if (name != obj.Name) + { + var tw = ctx.Write($@"[global::ProtoBuf.ProtoEnum("); + tw.Write($@"Name = @""{obj.Name}"""); + tw.WriteLine(")]"); + } + + WriteOptions(ctx, obj.Options); + ctx.WriteLine($"{Escape(name)} = {obj.Number},"); + } + + /// + /// End a message + /// + protected override void WriteMessageFooter(GeneratorContext ctx, DescriptorProto obj, ref object state) + { + ctx.Outdent().WriteLine("}").WriteLine(); + } + /// + /// Start a message + /// + protected override void WriteMessageHeader(GeneratorContext ctx, DescriptorProto obj, ref object state) + { + var name = ctx.NameNormalizer.GetName(obj); + GetTypeName2(obj.FullyQualifiedName); + var tw = ctx.Write($@"[global::ProtoBuf.ProtoContract("); + if (name != obj.Name) tw.Write($@"Name = @""{obj.Name}"""); + tw.WriteLine(")]"); + WriteOptions(ctx, obj.Options); + tw = ctx.Write($"{GetAccess(GetAccess(obj))} partial class {Escape(name)}"); + if (obj.ExtensionRanges.Count != 0) tw.Write(" : global::ProtoBuf.IExtensible"); + tw.WriteLine(); + ctx.WriteLine("{").Indent(); + if (obj.Options?.MessageSetWireFormat == true) + { + ctx.WriteLine("#error message_set_wire_format is not currently implemented").WriteLine(); + } + if (obj.ExtensionRanges.Count != 0) + { + ctx.WriteLine($"private global::ProtoBuf.IExtension {FieldPrefix}extensionData;") + .WriteLine($"global::ProtoBuf.IExtension global::ProtoBuf.IExtensible.GetExtensionObject(bool createIfMissing)").Indent() + .WriteLine($"=> global::ProtoBuf.Extensible.GetExtensionObject(ref {FieldPrefix}extensionData, createIfMissing);").Outdent().WriteLine(); + } + } + + private static void WriteOptions(GeneratorContext ctx, T obj) where T : class, ISchemaOptions + { + if (obj == null) return; + if (obj.Deprecated) + { + ctx.WriteLine($"[global::System.Obsolete]"); + } + } + + const string FieldPrefix = "__pbn__"; + + /// + /// Get the language specific keyword representing an access level + /// + public override string GetAccess(Access access) + { + switch (access) + { + case Access.Internal: return "internal"; + case Access.Public: return "public"; + case Access.Private: return "private"; + default: return base.GetAccess(access); + } + } + static HashSet TypeNames2 = new HashSet(); + static string GetTypeName2(string type) { + if (type.StartsWith(".")) { type = type.Substring(1); } + TypeNames2.Add(type); + return type; + } + public static void ClearTypeNames(){ + TypeNames2.Clear (); + } + /// + /// Write a field + /// + protected override void WriteField(GeneratorContext ctx, FieldDescriptorProto obj, ref object state, OneOfStub[] oneOfs) + { + string dataFormat; + var typeName = GetTypeName(ctx, obj, out dataFormat, out var isMap); + if (isMap) + { + return; + } + var name = ctx.NameNormalizer.GetName(obj); + var tw = ctx.Write($@"[global::ProtoBuf.ProtoMember({obj.Number}"); + + if (!string.IsNullOrWhiteSpace(dataFormat)) + { + tw.Write($", (int)global::ProtoBuf.DataFormat.{dataFormat}"); + } + if (name != obj.Name) + { + tw.Write($@", Name = @""{obj.Name}"""); + } + var options = 
obj.Options?.GetOptions(); + if (options?.AsReference == true) + { + tw.Write($@", AsReference = true"); + } + if (options?.DynamicType == true) + { + tw.Write($@", DynamicType = true"); + } + + bool isOptional = obj.label == FieldDescriptorProto.Label.LabelOptional; + bool isRepeated = obj.label == FieldDescriptorProto.Label.LabelRepeated; + // Only needed by ILRuntime + /*if (isRepeated && obj.type == FieldDescriptorProto.Type.TypeMessage) + { + tw.Write($@", TypeName = ""{GetTypeName2(obj.TypeName)}"""); + }*/ + OneOfStub oneOf = obj.ShouldSerializeOneofIndex() ? oneOfs?[obj.OneofIndex] : null; + if (oneOf != null && oneOf.CountTotal == 1) + { + oneOf = null; // not really a one-of, then! + } + bool explicitValues = isOptional && oneOf == null && ctx.Syntax == FileDescriptorProto.SyntaxProto2 + && obj.type != FieldDescriptorProto.Type.TypeMessage + && obj.type != FieldDescriptorProto.Type.TypeGroup; + + + string defaultValue = null; + bool suppressDefaultAttribute = !isOptional; + if (isOptional || obj.type == FieldDescriptorProto.Type.TypeEnum) + { + //GetTypeName2(obj.TypeName); + defaultValue = obj.DefaultValue; + + if (obj.type == FieldDescriptorProto.Type.TypeString) + { + defaultValue = string.IsNullOrEmpty(defaultValue) ? "\"\"" + : ("@\"" + (defaultValue ?? "").Replace("\"", "\"\"") + "\""); + } + else if (obj.type == FieldDescriptorProto.Type.TypeDouble) + { + switch (defaultValue) + { + case "inf": defaultValue = "double.PositiveInfinity"; break; + case "-inf": defaultValue = "double.NegativeInfinity"; break; + case "nan": defaultValue = "double.NaN"; break; + } + } + else if (obj.type == FieldDescriptorProto.Type.TypeFloat) + { + switch (defaultValue) + { + case "inf": defaultValue = "float.PositiveInfinity"; break; + case "-inf": defaultValue = "float.NegativeInfinity"; break; + case "nan": defaultValue = "float.NaN"; break; + } + } + else if (obj.type == FieldDescriptorProto.Type.TypeEnum) + { + var enumType = ctx.TryFind(obj.TypeName); + if (enumType != null) + { + EnumValueDescriptorProto found = null; + if (!string.IsNullOrEmpty(defaultValue)) + { + found = enumType.Values.FirstOrDefault(x => x.Name == defaultValue); + } + else if (ctx.Syntax == FileDescriptorProto.SyntaxProto2) + { + // find the first one; if that is a zero, we don't need it after all + found = enumType.Values.FirstOrDefault(); + if(found != null && found.Number == 0) + { + if(!isOptional) found = null; // we don't need it after all + } + } + // for proto3 the default is 0, so no need to do anything - GetValueOrDefault() will do it all + + if (found != null) + { + defaultValue = ctx.NameNormalizer.GetName(found); + } + if (!string.IsNullOrWhiteSpace(defaultValue)) + { + //defaultValue = ctx.NameNormalizer.GetName(enumType) + "." + defaultValue; + + defaultValue = "global::"+enumType.FullyQualifiedName.Substring(1) + "." + defaultValue; + } + } + } + } + + if (obj.IsPacked(ctx.Syntax)) + { + tw.Write($", IsPacked = true"); + } + if (obj.label == FieldDescriptorProto.Label.LabelRequired) + { + tw.Write($", IsRequired = true"); + } + tw.WriteLine(")]"); + if (!isRepeated && !string.IsNullOrWhiteSpace(defaultValue) && !suppressDefaultAttribute) + { + ctx.WriteLine($"[global::System.ComponentModel.DefaultValue({defaultValue})]"); + } + WriteOptions(ctx, obj.Options); + if (isRepeated) + { + var mapMsgType = isMap ? 
ctx.TryFind(obj.TypeName) : null; + if (mapMsgType != null) + { + string keyDataFormat; + bool _; + var keyTypeName = GetTypeName(ctx, mapMsgType.Fields.Single(x => x.Number == 1), + out keyDataFormat, out _); + string valueDataFormat; + var valueTypeName = GetTypeName(ctx, mapMsgType.Fields.Single(x => x.Number == 2), + out valueDataFormat, out _); + + bool first = true; + tw = ctx.Write($"[global::ProtoBuf.ProtoMap"); + if (!string.IsNullOrWhiteSpace(keyDataFormat)) + { + tw.Write($"{(first ? "(" : ", ")}KeyFormat = global::ProtoBuf.DataFormat.{keyDataFormat}"); + first = false; + } + if (!string.IsNullOrWhiteSpace(valueDataFormat)) + { + tw.Write($"{(first ? "(" : ", ")}ValueFormat = global::ProtoBuf.DataFormat.{valueDataFormat}"); + first = false; + } + tw.WriteLine(first ? "]" : ")]"); + ctx.WriteLine($"{GetAccess(GetAccess(obj))} global::System.Collections.Generic.Dictionary<{keyTypeName}, {valueTypeName}> {Escape(name)} {{ get; }} = new global::System.Collections.Generic.Dictionary<{keyTypeName}, {valueTypeName}>();"); + } + else if (UseArray(obj)) + { + ctx.WriteLine($"{GetAccess(GetAccess(obj))} {typeName}[] {Escape(name)} {{ get; set; }}"); + } + else + { + ctx.WriteLine($"{GetAccess(GetAccess(obj))} global::System.Collections.Generic.List<{typeName}> {Escape(name)} {{ get; }} = new global::System.Collections.Generic.List<{typeName}>();"); + } + } + else if (oneOf != null) + { + var defValue = string.IsNullOrWhiteSpace(defaultValue) ? $"default({typeName})" : defaultValue; + var fieldName = FieldPrefix + oneOf.OneOf.Name; + var storage = oneOf.GetStorage(obj.type, obj.TypeName); + ctx.WriteLine($"{GetAccess(GetAccess(obj))} {typeName} {Escape(name)}").WriteLine("{").Indent(); + + switch (obj.type) + { + case FieldDescriptorProto.Type.TypeMessage: + case FieldDescriptorProto.Type.TypeGroup: + case FieldDescriptorProto.Type.TypeEnum: + case FieldDescriptorProto.Type.TypeBytes: + case FieldDescriptorProto.Type.TypeString: + ctx.WriteLine($"get {{ return {fieldName}.Is({obj.Number}) ? (({typeName}){fieldName}.{storage}) : {defValue}; }}"); + break; + default: + ctx.WriteLine($"get {{ return {fieldName}.Is({obj.Number}) ? {fieldName}.{storage} : {defValue}; }}"); + break; + } + var unionType = oneOf.GetUnionType(); + ctx.WriteLine($"set {{ {fieldName} = new global::ProtoBuf.{unionType}({obj.Number}, value); }}") + .Outdent().WriteLine("}") + .WriteLine($"{GetAccess(GetAccess(obj))} bool ShouldSerialize{name}() => {fieldName}.Is({obj.Number});") + .WriteLine($"{GetAccess(GetAccess(obj))} void Reset{name}() => global::ProtoBuf.{unionType}.Reset(ref {fieldName}, {obj.Number});"); + + if (oneOf.IsFirst()) + { + ctx.WriteLine().WriteLine($"private global::ProtoBuf.{unionType} {fieldName};"); + } + } + else if (explicitValues) + { + string fieldName = FieldPrefix + name, fieldType; + bool isRef = false; + switch (obj.type) + { + case FieldDescriptorProto.Type.TypeString: + case FieldDescriptorProto.Type.TypeBytes: + fieldType = typeName; + isRef = true; + break; + default: + fieldType = typeName + "?"; + break; + } + ctx.WriteLine($"{GetAccess(GetAccess(obj))} {typeName} {Escape(name)}").WriteLine("{").Indent(); + tw = ctx.Write($"get {{ return {fieldName}"); + if (!string.IsNullOrWhiteSpace(defaultValue)) + { + tw.Write(" ?? 
"); + tw.Write(defaultValue); + } + else if (!isRef) + { + tw.Write(".GetValueOrDefault()"); + } + tw.WriteLine("; }"); + ctx.WriteLine($"set {{ {fieldName} = value; }}") + .Outdent().WriteLine("}") + .WriteLine($"{GetAccess(GetAccess(obj))} bool ShouldSerialize{name}() => {fieldName} != null;") + .WriteLine($"{GetAccess(GetAccess(obj))} void Reset{name}() => {fieldName} = null;") + .WriteLine($"private {fieldType} {fieldName};"); + } + else + { + tw = ctx.Write($"{GetAccess(GetAccess(obj))} {typeName} {Escape(name)} {{ get; set; }}"); + if (!string.IsNullOrWhiteSpace(defaultValue)) tw.Write($" = {defaultValue};"); + tw.WriteLine(); + } + ctx.WriteLine(); + } + /// + /// Starts an extgensions block + /// + protected override void WriteExtensionsHeader(GeneratorContext ctx, FileDescriptorProto obj, ref object state) + { + var name = obj?.Options?.GetOptions()?.ExtensionTypeName; + if (string.IsNullOrWhiteSpace(name)) name = "Extensions"; + ctx.WriteLine($"{GetAccess(GetAccess(obj))} static class {Escape(name)}").WriteLine("{").Indent(); + } + /// + /// Ends an extgensions block + /// + protected override void WriteExtensionsFooter(GeneratorContext ctx, FileDescriptorProto obj, ref object state) + { + ctx.Outdent().WriteLine("}"); + } + /// + /// Starts an extensions block + /// + protected override void WriteExtensionsHeader(GeneratorContext ctx, DescriptorProto obj, ref object state) + { + var name = obj?.Options?.GetOptions()?.ExtensionTypeName; + if (string.IsNullOrWhiteSpace(name)) name = "Extensions"; + ctx.WriteLine($"{GetAccess(GetAccess(obj))} static class {Escape(name)}").WriteLine("{").Indent(); + } + /// + /// Ends an extensions block + /// + protected override void WriteExtensionsFooter(GeneratorContext ctx, DescriptorProto obj, ref object state) + { + ctx.Outdent().WriteLine("}"); + } + /// + /// Write an extension + /// + protected override void WriteExtension(GeneratorContext ctx, FieldDescriptorProto field) + { + string dataFormat; + bool isMap; + var type = GetTypeName(ctx, field, out dataFormat, out isMap); + + if (isMap) + { + ctx.WriteLine("#error map extensions not yet implemented"); + } + else if (field.label == FieldDescriptorProto.Label.LabelRepeated) + { + ctx.WriteLine("#error repeated extensions not yet implemented"); + } + else + { + var msg = ctx.TryFind(field.Extendee); + var extendee = MakeRelativeName(field, msg, ctx.NameNormalizer); + + var @this = field.Parent is FileDescriptorProto ? "this " : ""; + string name = ctx.NameNormalizer.GetName(field); + var tw = ctx.WriteLine($"{GetAccess(GetAccess(field))} static {type} Get{name}({@this}{extendee} obj)") + .Write($"=> obj == null ? 
default({type}) : global::ProtoBuf.Extensible.GetValue<{type}>(obj, {field.Number}"); + if (!string.IsNullOrEmpty(dataFormat)) + { + tw.Write($", global::ProtoBuf.DataFormat.{dataFormat}"); + } + tw.WriteLine(");"); + ctx.WriteLine(); + // GetValue(IExtensible instance, int tag, DataFormat format) + } + } + + private static bool UseArray(FieldDescriptorProto field) + { + switch (field.type) + { + case FieldDescriptorProto.Type.TypeBool: + case FieldDescriptorProto.Type.TypeDouble: + case FieldDescriptorProto.Type.TypeFixed32: + case FieldDescriptorProto.Type.TypeFixed64: + case FieldDescriptorProto.Type.TypeFloat: + case FieldDescriptorProto.Type.TypeInt32: + case FieldDescriptorProto.Type.TypeInt64: + case FieldDescriptorProto.Type.TypeSfixed32: + case FieldDescriptorProto.Type.TypeSfixed64: + case FieldDescriptorProto.Type.TypeSint32: + case FieldDescriptorProto.Type.TypeSint64: + case FieldDescriptorProto.Type.TypeUint32: + case FieldDescriptorProto.Type.TypeUint64: + return true; + default: + return false; + } + } + + private string GetTypeName(GeneratorContext ctx, FieldDescriptorProto field, out string dataFormat, out bool isMap) + { + dataFormat = ""; + isMap = false; + switch (field.type) + { + case FieldDescriptorProto.Type.TypeDouble: + return "double"; + case FieldDescriptorProto.Type.TypeFloat: + return "float"; + case FieldDescriptorProto.Type.TypeBool: + return "bool"; + case FieldDescriptorProto.Type.TypeString: + return "string"; + case FieldDescriptorProto.Type.TypeSint32: + dataFormat = nameof(DataFormat.ZigZag); + return "int"; + case FieldDescriptorProto.Type.TypeInt32: + return "int"; + case FieldDescriptorProto.Type.TypeSfixed32: + dataFormat = nameof(DataFormat.FixedSize); + return "int"; + case FieldDescriptorProto.Type.TypeSint64: + dataFormat = nameof(DataFormat.ZigZag); + return "long"; + case FieldDescriptorProto.Type.TypeInt64: + return "long"; + case FieldDescriptorProto.Type.TypeSfixed64: + dataFormat = nameof(DataFormat.FixedSize); + return "long"; + case FieldDescriptorProto.Type.TypeFixed32: + dataFormat = nameof(DataFormat.FixedSize); + return "uint"; + case FieldDescriptorProto.Type.TypeUint32: + return "uint"; + case FieldDescriptorProto.Type.TypeFixed64: + dataFormat = nameof(DataFormat.FixedSize); + return "ulong"; + case FieldDescriptorProto.Type.TypeUint64: + return "ulong"; + case FieldDescriptorProto.Type.TypeBytes: + return "byte[]"; + case FieldDescriptorProto.Type.TypeEnum: + switch (field.TypeName) + { + case ".bcl.DateTime.DateTimeKind": + return "global::System.DateTimeKind"; + } + var enumType = ctx.TryFind(field.TypeName); + return MakeRelativeName(field, enumType, ctx.NameNormalizer); + case FieldDescriptorProto.Type.TypeGroup: + case FieldDescriptorProto.Type.TypeMessage: + switch (field.TypeName) + { + case WellKnownTypeTimestamp: + dataFormat = "WellKnown"; + return "global::System.DateTime?"; + case WellKnownTypeDuration: + dataFormat = "WellKnown"; + return "global::System.TimeSpan?"; + case ".bcl.NetObjectProxy": + return "object"; + case ".bcl.DateTime": + return "global::System.DateTime?"; + case ".bcl.TimeSpan": + return "global::System.TimeSpan?"; + case ".bcl.Decimal": + return "decimal?"; + case ".bcl.Guid": + return "global::System.Guid?"; + } + var msgType = ctx.TryFind(field.TypeName); + if (field.type == FieldDescriptorProto.Type.TypeGroup) + { + dataFormat = nameof(DataFormat.Group); + } + isMap = msgType?.Options?.MapEntry ?? 
false; + return MakeRelativeName(field, msgType, ctx.NameNormalizer); + default: + return field.TypeName; + } + } + + private string MakeRelativeName(FieldDescriptorProto field, IType target, NameNormalizer normalizer) + { + if (target == null) return Escape(field.TypeName); // the only thing we know + + var declaringType = field.Parent; + + if (declaringType is IType) + { + var name = FindNameFromCommonAncestor((IType)declaringType, target, normalizer); + if (!string.IsNullOrWhiteSpace(name)) return name; + } + return Escape(field.TypeName); // give up! + } + + // k, what we do is; we have two types; each knows the parent, but nothing else, so: + // for each, use a stack to build the ancestry tree - the "top" of the stack will be the + // package, the bottom of the stack will be the type itself. They will often be stacks + // of different heights. + // + // Find how many is in the smallest stack; now take that many items, in turn, until we + // get something that is different (at which point, put that one back on the stack), or + // we run out of items in one of the stacks. + // + // There are now two options: + // - we ran out of things in the "target" stack - in which case, they are common enough to not + // need any resolution - just give back the fixed name + // - we have things left in the "target" stack - in which case we have found a common ancestor, + // or the target is a descendent; either way, just concat what is left (including the package + // if the package itself was different) + + private string FindNameFromCommonAncestor(IType declaring, IType target, NameNormalizer normalizer) + { + // trivial case; asking for self, or asking for immediate child + if (ReferenceEquals(declaring, target) || ReferenceEquals(declaring, target.Parent)) + { + if (target is DescriptorProto) return Escape(normalizer.GetName((DescriptorProto)target)); + if (target is EnumDescriptorProto) return Escape(normalizer.GetName((EnumDescriptorProto)target)); + return null; + } + + var origTarget = target; + var xStack = new Stack(); + + while (declaring != null) + { + xStack.Push(declaring); + declaring = declaring.Parent; + } + var yStack = new Stack(); + + while (target != null) + { + yStack.Push(target); + target = target.Parent; + } + int lim = Math.Min(xStack.Count, yStack.Count); + for (int i = 0; i < lim; i++) + { + declaring = xStack.Peek(); + target = yStack.Pop(); + if (!ReferenceEquals(target, declaring)) + { + // special-case: if both are the package (file), and they have the same namespace: we're OK + if (target is FileDescriptorProto && declaring is FileDescriptorProto && + normalizer.GetName((FileDescriptorProto)declaring) == normalizer.GetName((FileDescriptorProto)target)) + { + // that's fine, keep going + } + else + { + // put it back + yStack.Push(target); + break; + } + } + } + // if we used everything, then the target is an ancestor-or-self + if (yStack.Count == 0) + { + target = origTarget; + if (target is DescriptorProto) return Escape(normalizer.GetName((DescriptorProto)target)); + if (target is EnumDescriptorProto) return Escape(normalizer.GetName((EnumDescriptorProto)target)); + return null; + } + + var sb = new StringBuilder(); + while (yStack.Count != 0) + { + target = yStack.Pop(); + + string nextName; + if (target is FileDescriptorProto) nextName = normalizer.GetName((FileDescriptorProto)target); + else if (target is DescriptorProto) nextName = normalizer.GetName((DescriptorProto)target); + else if (target is EnumDescriptorProto) nextName = 
normalizer.GetName((EnumDescriptorProto)target); + else return null; + + if (!string.IsNullOrWhiteSpace(nextName)) + { + if (sb.Length == 0 && target is FileDescriptorProto) sb.Append("global::"); + else if (sb.Length != 0) sb.Append('.'); + sb.Append(Escape(nextName)); + } + } + return sb.ToString(); + } + + static bool IsAncestorOrSelf(IType parent, IType child) + { + while (parent != null) + { + if (ReferenceEquals(parent, child)) return true; + parent = parent.Parent; + } + return false; + } + const string WellKnownTypeTimestamp = ".google.protobuf.Timestamp", + WellKnownTypeDuration = ".google.protobuf.Duration"; + } +} diff --git a/Editor/protobuf-net.Reflection/CSharpCodeGenerator.cs.meta b/Editor/protobuf-net.Reflection/CSharpCodeGenerator.cs.meta new file mode 100644 index 0000000..e66ae13 --- /dev/null +++ b/Editor/protobuf-net.Reflection/CSharpCodeGenerator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8f18de68ea6964dc78de6459348acefe +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/protobuf-net.Reflection/CodeGenerator.OneOfStub.cs b/Editor/protobuf-net.Reflection/CodeGenerator.OneOfStub.cs new file mode 100644 index 0000000..cbf438d --- /dev/null +++ b/Editor/protobuf-net.Reflection/CodeGenerator.OneOfStub.cs @@ -0,0 +1,158 @@ +using Google.Protobuf.Reflection; + +namespace ProtoBuf.Reflection +{ + partial class CommonCodeGenerator + { + /// + /// Represents the union summary of a one-of declaration + /// + protected class OneOfStub + { + /// + /// The underlying descriptor + /// + public OneofDescriptorProto OneOf { get; } + + internal OneOfStub(OneofDescriptorProto decl) + { + OneOf = decl; + } + internal int Count32 { get; private set; } + internal int Count64 { get; private set; } + internal int Count128 { get; private set; } + internal int CountRef { get; private set; } + internal int CountTotal => CountRef + Count32 + Count64; + + private void AccountFor(FieldDescriptorProto.Type type, string typeName) + { + switch (type) + { + case FieldDescriptorProto.Type.TypeBool: + case FieldDescriptorProto.Type.TypeEnum: + case FieldDescriptorProto.Type.TypeFixed32: + case FieldDescriptorProto.Type.TypeFloat: + case FieldDescriptorProto.Type.TypeInt32: + case FieldDescriptorProto.Type.TypeSfixed32: + case FieldDescriptorProto.Type.TypeSint32: + case FieldDescriptorProto.Type.TypeUint32: + Count32++; + break; + case FieldDescriptorProto.Type.TypeDouble: + case FieldDescriptorProto.Type.TypeFixed64: + case FieldDescriptorProto.Type.TypeInt64: + case FieldDescriptorProto.Type.TypeSfixed64: + case FieldDescriptorProto.Type.TypeSint64: + case FieldDescriptorProto.Type.TypeUint64: + Count32++; + Count64++; + break; + case FieldDescriptorProto.Type.TypeMessage: + switch(typeName) + { + case ".google.protobuf.Timestamp": + case ".google.protobuf.Duration": + Count64++; + break; + case ".bcl.Guid": + Count128++; + break; + default: + CountRef++; + break; + } + break; + default: + CountRef++; + break; + } + } + internal string GetStorage(FieldDescriptorProto.Type type, string typeName) + { + switch (type) + { + case FieldDescriptorProto.Type.TypeBool: + return "Boolean"; + case FieldDescriptorProto.Type.TypeInt32: + case FieldDescriptorProto.Type.TypeSfixed32: + case FieldDescriptorProto.Type.TypeSint32: + case FieldDescriptorProto.Type.TypeFixed32: + case FieldDescriptorProto.Type.TypeEnum: + return "Int32"; + case 
FieldDescriptorProto.Type.TypeFloat: + return "Single"; + case FieldDescriptorProto.Type.TypeUint32: + return "UInt32"; + case FieldDescriptorProto.Type.TypeDouble: + return "Double"; + case FieldDescriptorProto.Type.TypeFixed64: + case FieldDescriptorProto.Type.TypeInt64: + case FieldDescriptorProto.Type.TypeSfixed64: + case FieldDescriptorProto.Type.TypeSint64: + return "Int64"; + case FieldDescriptorProto.Type.TypeUint64: + return "UInt64"; + case FieldDescriptorProto.Type.TypeMessage: + switch (typeName) + { + case ".google.protobuf.Timestamp": + return "DateTime"; + case ".google.protobuf.Duration": + return "TimeSpan"; + case ".bcl.Guid": + return "Guid"; + default: + return "Object"; + } + default: + return "Object"; + } + } + internal static OneOfStub[] Build(GeneratorContext context, DescriptorProto message) + { + if (message.OneofDecls.Count == 0) return null; + var stubs = new OneOfStub[message.OneofDecls.Count]; + int index = 0; + foreach (var decl in message.OneofDecls) + { + stubs[index++] = new OneOfStub(decl); + } + foreach (var field in message.Fields) + { + if (field.ShouldSerializeOneofIndex()) + { + stubs[field.OneofIndex].AccountFor(field.type, field.TypeName); + } + } + return stubs; + } + private bool isFirst = true; + internal bool IsFirst() + { + if (isFirst) + { + isFirst = false; + return true; + } + return false; + } + + internal string GetUnionType() + { + if (Count128 != 0) + { + return CountRef == 0 ? "DiscriminatedUnion128" : "DiscriminatedUnion128Object"; + } + if (Count64 != 0) + { + return CountRef == 0 ? "DiscriminatedUnion64" : "DiscriminatedUnion64Object"; + } + if (Count32 != 0) + { + return CountRef == 0 ? "DiscriminatedUnion32" : "DiscriminatedUnion32Object"; + } + return "DiscriminatedUnionObject"; + } + } + } +} diff --git a/Editor/protobuf-net.Reflection/CodeGenerator.OneOfStub.cs.meta b/Editor/protobuf-net.Reflection/CodeGenerator.OneOfStub.cs.meta new file mode 100644 index 0000000..c7bc17c --- /dev/null +++ b/Editor/protobuf-net.Reflection/CodeGenerator.OneOfStub.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: ff61418fb53f3488c9fe4974974b436e +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/protobuf-net.Reflection/CodeGenerator.cs b/Editor/protobuf-net.Reflection/CodeGenerator.cs new file mode 100644 index 0000000..a8d848e --- /dev/null +++ b/Editor/protobuf-net.Reflection/CodeGenerator.cs @@ -0,0 +1,476 @@ +using Google.Protobuf.Reflection; +using System; +using System.Collections.Generic; +using System.IO; + +namespace ProtoBuf.Reflection +{ + /// + /// Abstract root for a general purpose code-generator + /// + public abstract class CodeGenerator + { + /// + /// The logical name of this code generator + /// + public abstract string Name { get; } + /// + /// Get a string representation of the instance + /// + public override string ToString() => Name; + + /// + /// Execute the code generator against a FileDescriptorSet, yielding a sequence of files + /// + public abstract IEnumerable Generate(FileDescriptorSet set, NameNormalizer normalizer = null); + + /// + /// Eexecute this code generator against a code file + /// + public CompilerResult Compile(CodeFile file) => Compile(new[] { file }); + /// + /// Eexecute this code generator against a set of code file + /// + public CompilerResult Compile(params CodeFile[] files) + { + var set = new FileDescriptorSet(); + foreach (var file in 
files) + { + using (var reader = new StringReader(file.Text)) + { + Console.WriteLine($"Parsing {file.Name}..."); + set.Add(file.Name, true, reader); + } + } + set.Process(); + var results = new List(); + var newErrors = new List(); + + try + { + results.AddRange(Generate(set)); + } + catch (Exception ex) + { + set.Errors.Add(new Error(default(Token), ex.Message, true)); + } + var errors = set.GetErrors(); + + return new CompilerResult(errors, results.ToArray()); + } + } + /// + /// Abstract base class for a code generator that uses a visitor pattern + /// + public abstract partial class CommonCodeGenerator : CodeGenerator + { + private Access? GetAccess(IType parent) + { + if (parent is DescriptorProto) + return GetAccess((DescriptorProto)parent); + if (parent is EnumDescriptorProto) + return GetAccess((EnumDescriptorProto)parent); + if (parent is FileDescriptorProto) + return GetAccess((FileDescriptorProto)parent); + return null; + } + /// + /// Obtain the access of an item, accounting for the model's hierarchy + /// + protected Access GetAccess(FileDescriptorProto obj) + => obj?.Options?.GetOptions()?.Access ?? Access.Public; + + static Access? NullIfInherit(Access? access) + => access == Access.Inherit ? null : access; + /// + /// Obtain the access of an item, accounting for the model's hierarchy + /// + protected Access GetAccess(DescriptorProto obj) + => NullIfInherit(obj?.Options?.GetOptions()?.Access) + ?? GetAccess(obj?.Parent) ?? Access.Public; + /// + /// Obtain the access of an item, accounting for the model's hierarchy + /// + protected Access GetAccess(FieldDescriptorProto obj) + => NullIfInherit(obj?.Options?.GetOptions()?.Access) + ?? GetAccess(obj?.Parent as IType) ?? Access.Public; + /// + /// Obtain the access of an item, accounting for the model's hierarchy + /// + protected Access GetAccess(EnumDescriptorProto obj) + => NullIfInherit(obj?.Options?.GetOptions()?.Access) + ?? GetAccess(obj?.Parent) ?? Access.Public; + /// + /// Get the textual name of a given access level + /// + public virtual string GetAccess(Access access) + => access.ToString(); + + /// + /// The indentation used by this code generator + /// + public virtual string Indent => " "; + /// + /// The file extension of the files generatred by this generator + /// + protected abstract string DefaultFileExtension { get; } + /// + /// Handle keyword escaping in the language of this code generator + /// + /// + /// + protected abstract string Escape(string identifier); + /// + /// Execute the code generator against a FileDescriptorSet, yielding a sequence of files + /// + public override IEnumerable Generate(FileDescriptorSet set, NameNormalizer normalizer = null) + { + foreach (var file in set.Files) + { + if (!file.IncludeInOutput) continue; + + var fileName = Path.ChangeExtension(file.Name, DefaultFileExtension); + + string generated; + using (var buffer = new StringWriter()) + { + var ctx = new GeneratorContext(file, normalizer ?? 
NameNormalizer.Default, buffer, Indent); + + ctx.BuildTypeIndex(); // populates for TryFind + WriteFile(ctx, file); + generated = buffer.ToString(); + } + yield return new CodeFile(fileName, generated); + + } + + } + + /// + /// Emits the code for a file in a descriptor-set + /// + protected virtual void WriteFile(GeneratorContext ctx, FileDescriptorProto obj) + { + var file = ctx.File; + object state = null; + WriteFileHeader(ctx, obj, ref state); + + foreach (var inner in file.MessageTypes) + { + WriteMessage(ctx, inner); + } + foreach (var inner in file.EnumTypes) + { + WriteEnum(ctx, inner); + } + foreach (var inner in file.Services) + { + WriteService(ctx, inner); + } + if(file.Extensions.Count != 0) + { + object extState = null; + WriteExtensionsHeader(ctx, file, ref extState); + foreach(var ext in file.Extensions) + { + WriteExtension(ctx, ext); + } + WriteExtensionsFooter(ctx, file, ref extState); + } + WriteFileFooter(ctx, obj, ref state); + } + /// + /// Emit code representing an extension field + /// + protected virtual void WriteExtension(GeneratorContext ctx, FieldDescriptorProto ext) { } + /// + /// Emit code preceeding a set of extension fields + /// + protected virtual void WriteExtensionsHeader(GeneratorContext ctx, FileDescriptorProto file, ref object state) { } + /// + /// Emit code following a set of extension fields + /// + protected virtual void WriteExtensionsFooter(GeneratorContext ctx, FileDescriptorProto file, ref object state) { } + /// + /// Emit code preceeding a set of extension fields + /// + protected virtual void WriteExtensionsHeader(GeneratorContext ctx, DescriptorProto file, ref object state) { } + /// + /// Emit code following a set of extension fields + /// + protected virtual void WriteExtensionsFooter(GeneratorContext ctx, DescriptorProto file, ref object state) { } + /// + /// Emit code representing a service + /// + protected virtual void WriteService(GeneratorContext ctx, ServiceDescriptorProto obj) + { + object state = null; + WriteServiceHeader(ctx, obj, ref state); + foreach (var inner in obj.Methods) + { + WriteServiceMethod(ctx, inner, ref state); + } + WriteServiceFooter(ctx, obj, ref state); + } + /// + /// Emit code following a set of service methods + /// + protected virtual void WriteServiceFooter(GeneratorContext ctx, ServiceDescriptorProto obj, ref object state) { } + + /// + /// Emit code representing a service method + /// + protected virtual void WriteServiceMethod(GeneratorContext ctx, MethodDescriptorProto inner, ref object state) { } + /// + /// Emit code following preceeding a set of service methods + /// + protected virtual void WriteServiceHeader(GeneratorContext ctx, ServiceDescriptorProto obj, ref object state) { } + /// + /// Check whether a particular message should be suppressed - for example because it represents a map + /// + protected virtual bool ShouldOmitMessage(GeneratorContext ctx, DescriptorProto obj, ref object state) + => obj.Options?.MapEntry ?? 
false; // don't write this type - use a dictionary instead + + /// + /// Emit code representing a message type + /// + protected virtual void WriteMessage(GeneratorContext ctx, DescriptorProto obj) + { + object state = null; + if (ShouldOmitMessage(ctx, obj, ref state)) return; + + WriteMessageHeader(ctx, obj, ref state); + var oneOfs = OneOfStub.Build(ctx, obj); + foreach (var inner in obj.Fields) + { + WriteField(ctx, inner, ref state, oneOfs); + } + foreach (var inner in obj.NestedTypes) + { + WriteMessage(ctx, inner); + } + foreach (var inner in obj.EnumTypes) + { + WriteEnum(ctx, inner); + } + if (obj.Extensions.Count != 0) + { + object extState = null; + WriteExtensionsHeader(ctx, obj, ref extState); + foreach (var ext in obj.Extensions) + { + WriteExtension(ctx, ext); + } + WriteExtensionsFooter(ctx, obj, ref extState); + } + WriteMessageFooter(ctx, obj, ref state); + } + /// + /// Emit code representing a message field + /// + protected abstract void WriteField(GeneratorContext ctx, FieldDescriptorProto obj, ref object state, OneOfStub[] oneOfs); + /// + /// Emit code following a set of message fields + /// + protected abstract void WriteMessageFooter(GeneratorContext ctx, DescriptorProto obj, ref object state); + /// + /// Emit code preceeding a set of message fields + /// + protected abstract void WriteMessageHeader(GeneratorContext ctx, DescriptorProto obj, ref object state); + /// + /// Emit code representing an enum type + /// + protected virtual void WriteEnum(GeneratorContext ctx, EnumDescriptorProto obj) + { + object state = null; + WriteEnumHeader(ctx, obj, ref state); + foreach (var inner in obj.Values) + { + WriteEnumValue(ctx, inner, ref state); + } + WriteEnumFooter(ctx, obj, ref state); + } + + /// + /// Emit code preceeding a set of enum values + /// + protected abstract void WriteEnumHeader(GeneratorContext ctx, EnumDescriptorProto obj, ref object state); + /// + /// Emit code representing an enum value + /// + protected abstract void WriteEnumValue(GeneratorContext ctx, EnumValueDescriptorProto obj, ref object state); + /// + /// Emit code following a set of enum values + /// + protected abstract void WriteEnumFooter(GeneratorContext ctx, EnumDescriptorProto obj, ref object state); + /// + /// Emit code at the start of a file + /// + protected virtual void WriteFileHeader(GeneratorContext ctx, FileDescriptorProto obj, ref object state) { } + /// + /// Emit code at the end of a file + /// + protected virtual void WriteFileFooter(GeneratorContext ctx, FileDescriptorProto obj, ref object state) { } + + /// + /// Represents the state of a code-generation invocation + /// + protected class GeneratorContext + { + /// + /// The file being processed + /// + public FileDescriptorProto File { get; } + /// + /// The token to use for indentation + /// + public string IndentToken { get; } + /// + /// The current indent level + /// + public int IndentLevel { get; private set; } + /// + /// The mechanism to use for name normalization + /// + public NameNormalizer NameNormalizer { get; } + /// + /// The output for this code generation + /// + public TextWriter Output { get; } + /// + /// The effective syntax of this code-generation cycle, defaulting to "proto2" if not explicity specified + /// + public string Syntax => string.IsNullOrWhiteSpace(File.Syntax) ? 
FileDescriptorProto.SyntaxProto2 : File.Syntax; + /// + /// Create a new GeneratorContext instance + /// + internal GeneratorContext(FileDescriptorProto file, NameNormalizer nameNormalizer, TextWriter output, string indentToken) + { + File = file; + NameNormalizer = nameNormalizer; + Output = output; + IndentToken = indentToken; + } + + /// + /// Ends the current line + /// + public GeneratorContext WriteLine() + { + Output.WriteLine(); + return this; + } + /// + /// Appends a value and ends the current line + /// + public GeneratorContext WriteLine(string line) + { + var indentLevel = IndentLevel; + var target = Output; + while (indentLevel-- > 0) + { + target.Write(IndentToken); + } + target.WriteLine(line); + return this; + } + /// + /// Appends a value to the current line + /// + public TextWriter Write(string value) + { + var indentLevel = IndentLevel; + var target = Output; + while (indentLevel-- > 0) + { + target.Write(IndentToken); + } + target.Write(value); + return target; + } + /// + /// Increases the indentation level + /// + public GeneratorContext Indent() + { + IndentLevel++; + return this; + } + /// + /// Decreases the indentation level + /// + public GeneratorContext Outdent() + { + IndentLevel--; + return this; + } + + /// + /// Try to find a descriptor of the type specified by T with the given full name + /// + public T TryFind(string typeName) where T : class + { + object obj; + if (!_knownTypes.TryGetValue(typeName, out obj) || obj == null) + { + return null; + } + return obj as T; + } + + private Dictionary _knownTypes = new Dictionary(); + void AddMessage(DescriptorProto message) + { + _knownTypes[message.FullyQualifiedName] = message; + foreach (var @enum in message.EnumTypes) + { + _knownTypes[@enum.FullyQualifiedName] = @enum; + } + foreach (var msg in message.NestedTypes) + { + AddMessage(msg); + } + } + internal void BuildTypeIndex() + { + { + var processedFiles = new HashSet(StringComparer.OrdinalIgnoreCase); + var pendingFiles = new Queue(); + + _knownTypes.Clear(); + processedFiles.Add(File.Name); + pendingFiles.Enqueue(File); + + while (pendingFiles.Count != 0) + { + var file = pendingFiles.Dequeue(); + + foreach (var @enum in file.EnumTypes) + { + _knownTypes[@enum.FullyQualifiedName] = @enum; + } + foreach (var msg in file.MessageTypes) + { + AddMessage(msg); + } + + if (file.HasImports()) + { + foreach (var import in file.GetImports()) + { + if (processedFiles.Add(import.Path)) + { + var importFile = file.Parent.GetFile(import.Path); + if (importFile != null) pendingFiles.Enqueue(importFile); + } + } + } + + } + } + } + } + } + + +} diff --git a/Editor/protobuf-net.Reflection/CodeGenerator.cs.meta b/Editor/protobuf-net.Reflection/CodeGenerator.cs.meta new file mode 100644 index 0000000..237ebc9 --- /dev/null +++ b/Editor/protobuf-net.Reflection/CodeGenerator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b021246de9d37497cb1f97a9ed01772a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/protobuf-net.Reflection/CustomOptions.cs b/Editor/protobuf-net.Reflection/CustomOptions.cs new file mode 100644 index 0000000..13a1fe1 --- /dev/null +++ b/Editor/protobuf-net.Reflection/CustomOptions.cs @@ -0,0 +1,141 @@ +// This file was generated by a tool; you should avoid making direct changes. 
+// Consider using 'partial classes' to extend these types +// Input: protogen.proto + +#pragma warning disable CS1591, CS0612, CS3021 + +namespace ProtoBuf.Reflection +{ + + [global::ProtoBuf.ProtoContract()] + public partial class ProtogenFileOptions + { + [global::ProtoBuf.ProtoMember(1, Name = @"namespace")] + [global::System.ComponentModel.DefaultValue("")] + public string Namespace { get; set; } = ""; + + [global::ProtoBuf.ProtoMember(2, Name = @"access")] + public Access Access { get; set; } + + [global::ProtoBuf.ProtoMember(3, Name = @"extensions")] + [global::System.ComponentModel.DefaultValue("")] + public string ExtensionTypeName { get; set; } = ""; + + } + + [global::ProtoBuf.ProtoContract()] + public partial class ProtogenMessageOptions + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name { get; set; } = ""; + + [global::ProtoBuf.ProtoMember(2, Name = @"access")] + public Access Access { get; set; } + + [global::ProtoBuf.ProtoMember(3, Name = @"extensions")] + [global::System.ComponentModel.DefaultValue("")] + public string ExtensionTypeName { get; set; } = ""; + + } + + [global::ProtoBuf.ProtoContract()] + public partial class ProtogenFieldOptions + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name { get; set; } = ""; + + [global::ProtoBuf.ProtoMember(2, Name = @"access")] + public Access Access { get; set; } + + [global::ProtoBuf.ProtoMember(3, Name = @"asRef")] + public bool AsReference { get; set; } + + [global::ProtoBuf.ProtoMember(4, Name = @"dynamicType")] + public bool DynamicType { get; set; } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class ProtogenEnumOptions + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name { get; set; } = ""; + + [global::ProtoBuf.ProtoMember(2, Name = @"access")] + public Access Access { get; set; } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class ProtogenEnumValueOptions + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name { get; set; } = ""; + + } + + [global::ProtoBuf.ProtoContract()] + public partial class ProtogenServiceOptions + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name { get; set; } = ""; + + [global::ProtoBuf.ProtoMember(2, Name = @"access")] + public Access Access { get; set; } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class ProtogenMethodOptions + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name { get; set; } = ""; + + } + + [global::ProtoBuf.ProtoContract()] + public enum Access + { + [global::ProtoBuf.ProtoEnum(Name = @"INHERIT")] + Inherit = 0, + [global::ProtoBuf.ProtoEnum(Name = @"PUBLIC")] + Public = 1, + [global::ProtoBuf.ProtoEnum(Name = @"PRIVATE")] + Private = 2, + [global::ProtoBuf.ProtoEnum(Name = @"INTERNAL")] + Internal = 3, + } + + public static class Extensions + { + public static ProtogenFileOptions GetOptions(this global::Google.Protobuf.Reflection.FileOptions obj) + => obj == null ? default(ProtogenFileOptions) : global::ProtoBuf.Extensible.GetValue(obj, 1037); + + public static ProtogenMessageOptions GetOptions(this global::Google.Protobuf.Reflection.MessageOptions obj) + => obj == null ? 
default(ProtogenMessageOptions) : global::ProtoBuf.Extensible.GetValue(obj, 1037); + + public static ProtogenFieldOptions GetOptions(this global::Google.Protobuf.Reflection.FieldOptions obj) + => obj == null ? default(ProtogenFieldOptions) : global::ProtoBuf.Extensible.GetValue(obj, 1037); + + public static ProtogenEnumOptions GetOptions(this global::Google.Protobuf.Reflection.EnumOptions obj) + => obj == null ? default(ProtogenEnumOptions) : global::ProtoBuf.Extensible.GetValue(obj, 1037); + + public static ProtogenEnumValueOptions GetOptions(this global::Google.Protobuf.Reflection.EnumValueOptions obj) + => obj == null ? default(ProtogenEnumValueOptions) : global::ProtoBuf.Extensible.GetValue(obj, 1037); + + public static ProtogenServiceOptions GetOptions(this global::Google.Protobuf.Reflection.ServiceOptions obj) + => obj == null ? default(ProtogenServiceOptions) : global::ProtoBuf.Extensible.GetValue(obj, 1037); + + public static ProtogenMethodOptions GetOptions(this global::Google.Protobuf.Reflection.MethodOptions obj) + => obj == null ? default(ProtogenMethodOptions) : global::ProtoBuf.Extensible.GetValue(obj, 1037); + + } +} + +#pragma warning restore CS1591, CS0612, CS3021 diff --git a/Editor/protobuf-net.Reflection/CustomOptions.cs.meta b/Editor/protobuf-net.Reflection/CustomOptions.cs.meta new file mode 100644 index 0000000..fd77e0b --- /dev/null +++ b/Editor/protobuf-net.Reflection/CustomOptions.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: c7a900f1d35ef45bd8dd1a9708204ef2 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/protobuf-net.Reflection/Descriptor.cs b/Editor/protobuf-net.Reflection/Descriptor.cs new file mode 100644 index 0000000..91efe2e --- /dev/null +++ b/Editor/protobuf-net.Reflection/Descriptor.cs @@ -0,0 +1,1149 @@ +// This file was generated by a tool; you should avoid making direct changes. +// Consider using 'partial classes' to extend these types +// Input: descriptor.proto + +#pragma warning disable CS1591, CS0612, CS3021 + +namespace Google.Protobuf.Reflection +{ + + [global::ProtoBuf.ProtoContract()] + public partial class FileDescriptorSet + { + [global::ProtoBuf.ProtoMember(1, Name = @"file")] + public global::System.Collections.Generic.List Files { get; } = new global::System.Collections.Generic.List(); + + } + + [global::ProtoBuf.ProtoContract()] + public partial class FileDescriptorProto + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name + { + get { return __pbn__Name ?? ""; } + set { __pbn__Name = value; } + } + public bool ShouldSerializeName() => __pbn__Name != null; + public void ResetName() => __pbn__Name = null; + private string __pbn__Name; + + [global::ProtoBuf.ProtoMember(2, Name = @"package")] + [global::System.ComponentModel.DefaultValue("")] + public string Package + { + get { return __pbn__Package ?? 
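+// (Pattern used throughout this generated file: each optional member is backed by a nullable __pbn__ field,
+// so the ShouldSerializeXyz()/ResetXyz() pair can report and clear whether the value was explicitly set.)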
""; } + set { __pbn__Package = value; } + } + public bool ShouldSerializePackage() => __pbn__Package != null; + public void ResetPackage() => __pbn__Package = null; + private string __pbn__Package; + + [global::ProtoBuf.ProtoMember(3, Name = @"dependency")] + public global::System.Collections.Generic.List Dependencies { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(10, Name = @"public_dependency")] + public int[] PublicDependencies { get; set; } + + [global::ProtoBuf.ProtoMember(11, Name = @"weak_dependency")] + public int[] WeakDependencies { get; set; } + + [global::ProtoBuf.ProtoMember(4, Name = @"message_type")] + public global::System.Collections.Generic.List MessageTypes { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(5, Name = @"enum_type")] + public global::System.Collections.Generic.List EnumTypes { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(6, Name = @"service")] + public global::System.Collections.Generic.List Services { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(7, Name = @"extension")] + public global::System.Collections.Generic.List Extensions { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(8, Name = @"options")] + public FileOptions Options { get; set; } + + [global::ProtoBuf.ProtoMember(9, Name = @"source_code_info")] + public SourceCodeInfo SourceCodeInfo { get; set; } + + [global::ProtoBuf.ProtoMember(12, Name = @"syntax")] + [global::System.ComponentModel.DefaultValue("")] + public string Syntax + { + get { return __pbn__Syntax ?? ""; } + set { __pbn__Syntax = value; } + } + public bool ShouldSerializeSyntax() => __pbn__Syntax != null; + public void ResetSyntax() => __pbn__Syntax = null; + private string __pbn__Syntax; + + } + + [global::ProtoBuf.ProtoContract()] + public partial class DescriptorProto + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name + { + get { return __pbn__Name ?? 
""; } + set { __pbn__Name = value; } + } + public bool ShouldSerializeName() => __pbn__Name != null; + public void ResetName() => __pbn__Name = null; + private string __pbn__Name; + + [global::ProtoBuf.ProtoMember(2, Name = @"field")] + public global::System.Collections.Generic.List Fields { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(6, Name = @"extension")] + public global::System.Collections.Generic.List Extensions { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(3, Name = @"nested_type")] + public global::System.Collections.Generic.List NestedTypes { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(4, Name = @"enum_type")] + public global::System.Collections.Generic.List EnumTypes { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(5, Name = @"extension_range")] + public global::System.Collections.Generic.List ExtensionRanges { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(8, Name = @"oneof_decl")] + public global::System.Collections.Generic.List OneofDecls { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(7, Name = @"options")] + public MessageOptions Options { get; set; } + + [global::ProtoBuf.ProtoMember(9, Name = @"reserved_range")] + public global::System.Collections.Generic.List ReservedRanges { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(10, Name = @"reserved_name")] + public global::System.Collections.Generic.List ReservedNames { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoContract()] + public partial class ExtensionRange + { + [global::ProtoBuf.ProtoMember(1, Name = @"start")] + public int Start + { + get { return __pbn__Start.GetValueOrDefault(); } + set { __pbn__Start = value; } + } + public bool ShouldSerializeStart() => __pbn__Start != null; + public void ResetStart() => __pbn__Start = null; + private int? __pbn__Start; + + [global::ProtoBuf.ProtoMember(2, Name = @"end")] + public int End + { + get { return __pbn__End.GetValueOrDefault(); } + set { __pbn__End = value; } + } + public bool ShouldSerializeEnd() => __pbn__End != null; + public void ResetEnd() => __pbn__End = null; + private int? __pbn__End; + + } + + [global::ProtoBuf.ProtoContract()] + public partial class ReservedRange + { + [global::ProtoBuf.ProtoMember(1, Name = @"start")] + public int Start + { + get { return __pbn__Start.GetValueOrDefault(); } + set { __pbn__Start = value; } + } + public bool ShouldSerializeStart() => __pbn__Start != null; + public void ResetStart() => __pbn__Start = null; + private int? __pbn__Start; + + [global::ProtoBuf.ProtoMember(2, Name = @"end")] + public int End + { + get { return __pbn__End.GetValueOrDefault(); } + set { __pbn__End = value; } + } + public bool ShouldSerializeEnd() => __pbn__End != null; + public void ResetEnd() => __pbn__End = null; + private int? __pbn__End; + + } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class FieldDescriptorProto + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name + { + get { return __pbn__Name ?? 
""; } + set { __pbn__Name = value; } + } + public bool ShouldSerializeName() => __pbn__Name != null; + public void ResetName() => __pbn__Name = null; + private string __pbn__Name; + + [global::ProtoBuf.ProtoMember(3, Name = @"number")] + public int Number + { + get { return __pbn__Number.GetValueOrDefault(); } + set { __pbn__Number = value; } + } + public bool ShouldSerializeNumber() => __pbn__Number != null; + public void ResetNumber() => __pbn__Number = null; + private int? __pbn__Number; + + [global::ProtoBuf.ProtoMember(4)] + public Label label + { + get { return __pbn__label.GetValueOrDefault(); } + set { __pbn__label = value; } + } + public bool ShouldSerializelabel() => __pbn__label != null; + public void Resetlabel() => __pbn__label = null; + private Label? __pbn__label; + + [global::ProtoBuf.ProtoMember(5)] + public Type type + { + get { return __pbn__type.GetValueOrDefault(); } + set { __pbn__type = value; } + } + public bool ShouldSerializetype() => __pbn__type != null; + public void Resettype() => __pbn__type = null; + private Type? __pbn__type; + + [global::ProtoBuf.ProtoMember(6, Name = @"type_name")] + [global::System.ComponentModel.DefaultValue("")] + public string TypeName + { + get { return __pbn__TypeName ?? ""; } + set { __pbn__TypeName = value; } + } + public bool ShouldSerializeTypeName() => __pbn__TypeName != null; + public void ResetTypeName() => __pbn__TypeName = null; + private string __pbn__TypeName; + + [global::ProtoBuf.ProtoMember(2, Name = @"extendee")] + [global::System.ComponentModel.DefaultValue("")] + public string Extendee + { + get { return __pbn__Extendee ?? ""; } + set { __pbn__Extendee = value; } + } + public bool ShouldSerializeExtendee() => __pbn__Extendee != null; + public void ResetExtendee() => __pbn__Extendee = null; + private string __pbn__Extendee; + + [global::ProtoBuf.ProtoMember(7, Name = @"default_value")] + [global::System.ComponentModel.DefaultValue("")] + public string DefaultValue + { + get { return __pbn__DefaultValue ?? ""; } + set { __pbn__DefaultValue = value; } + } + public bool ShouldSerializeDefaultValue() => __pbn__DefaultValue != null; + public void ResetDefaultValue() => __pbn__DefaultValue = null; + private string __pbn__DefaultValue; + + [global::ProtoBuf.ProtoMember(9, Name = @"oneof_index")] + public int OneofIndex + { + get { return __pbn__OneofIndex.GetValueOrDefault(); } + set { __pbn__OneofIndex = value; } + } + public bool ShouldSerializeOneofIndex() => __pbn__OneofIndex != null; + public void ResetOneofIndex() => __pbn__OneofIndex = null; + private int? __pbn__OneofIndex; + + [global::ProtoBuf.ProtoMember(10, Name = @"json_name")] + [global::System.ComponentModel.DefaultValue("")] + public string JsonName + { + get { return __pbn__JsonName ?? 
""; } + set { __pbn__JsonName = value; } + } + public bool ShouldSerializeJsonName() => __pbn__JsonName != null; + public void ResetJsonName() => __pbn__JsonName = null; + private string __pbn__JsonName; + + [global::ProtoBuf.ProtoMember(8, Name = @"options")] + public FieldOptions Options { get; set; } + + [global::ProtoBuf.ProtoContract()] + public enum Type + { + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_DOUBLE")] + TypeDouble = 1, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_FLOAT")] + TypeFloat = 2, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_INT64")] + TypeInt64 = 3, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_UINT64")] + TypeUint64 = 4, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_INT32")] + TypeInt32 = 5, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_FIXED64")] + TypeFixed64 = 6, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_FIXED32")] + TypeFixed32 = 7, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_BOOL")] + TypeBool = 8, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_STRING")] + TypeString = 9, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_GROUP")] + TypeGroup = 10, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_MESSAGE")] + TypeMessage = 11, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_BYTES")] + TypeBytes = 12, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_UINT32")] + TypeUint32 = 13, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_ENUM")] + TypeEnum = 14, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_SFIXED32")] + TypeSfixed32 = 15, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_SFIXED64")] + TypeSfixed64 = 16, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_SINT32")] + TypeSint32 = 17, + [global::ProtoBuf.ProtoEnum(Name = @"TYPE_SINT64")] + TypeSint64 = 18, + } + + [global::ProtoBuf.ProtoContract()] + public enum Label + { + [global::ProtoBuf.ProtoEnum(Name = @"LABEL_OPTIONAL")] + LabelOptional = 1, + [global::ProtoBuf.ProtoEnum(Name = @"LABEL_REQUIRED")] + LabelRequired = 2, + [global::ProtoBuf.ProtoEnum(Name = @"LABEL_REPEATED")] + LabelRepeated = 3, + } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class OneofDescriptorProto + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name + { + get { return __pbn__Name ?? ""; } + set { __pbn__Name = value; } + } + public bool ShouldSerializeName() => __pbn__Name != null; + public void ResetName() => __pbn__Name = null; + private string __pbn__Name; + + [global::ProtoBuf.ProtoMember(2, Name = @"options")] + public OneofOptions Options { get; set; } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class EnumDescriptorProto + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name + { + get { return __pbn__Name ?? ""; } + set { __pbn__Name = value; } + } + public bool ShouldSerializeName() => __pbn__Name != null; + public void ResetName() => __pbn__Name = null; + private string __pbn__Name; + + [global::ProtoBuf.ProtoMember(2, Name = @"value")] + public global::System.Collections.Generic.List Values { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(3, Name = @"options")] + public EnumOptions Options { get; set; } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class EnumValueDescriptorProto + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name + { + get { return __pbn__Name ?? 
""; } + set { __pbn__Name = value; } + } + public bool ShouldSerializeName() => __pbn__Name != null; + public void ResetName() => __pbn__Name = null; + private string __pbn__Name; + + [global::ProtoBuf.ProtoMember(2, Name = @"number")] + public int Number + { + get { return __pbn__Number.GetValueOrDefault(); } + set { __pbn__Number = value; } + } + public bool ShouldSerializeNumber() => __pbn__Number != null; + public void ResetNumber() => __pbn__Number = null; + private int? __pbn__Number; + + [global::ProtoBuf.ProtoMember(3, Name = @"options")] + public EnumValueOptions Options { get; set; } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class ServiceDescriptorProto + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name + { + get { return __pbn__Name ?? ""; } + set { __pbn__Name = value; } + } + public bool ShouldSerializeName() => __pbn__Name != null; + public void ResetName() => __pbn__Name = null; + private string __pbn__Name; + + [global::ProtoBuf.ProtoMember(2, Name = @"method")] + public global::System.Collections.Generic.List Methods { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(3, Name = @"options")] + public ServiceOptions Options { get; set; } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class MethodDescriptorProto + { + [global::ProtoBuf.ProtoMember(1, Name = @"name")] + [global::System.ComponentModel.DefaultValue("")] + public string Name + { + get { return __pbn__Name ?? ""; } + set { __pbn__Name = value; } + } + public bool ShouldSerializeName() => __pbn__Name != null; + public void ResetName() => __pbn__Name = null; + private string __pbn__Name; + + [global::ProtoBuf.ProtoMember(2, Name = @"input_type")] + [global::System.ComponentModel.DefaultValue("")] + public string InputType + { + get { return __pbn__InputType ?? ""; } + set { __pbn__InputType = value; } + } + public bool ShouldSerializeInputType() => __pbn__InputType != null; + public void ResetInputType() => __pbn__InputType = null; + private string __pbn__InputType; + + [global::ProtoBuf.ProtoMember(3, Name = @"output_type")] + [global::System.ComponentModel.DefaultValue("")] + public string OutputType + { + get { return __pbn__OutputType ?? ""; } + set { __pbn__OutputType = value; } + } + public bool ShouldSerializeOutputType() => __pbn__OutputType != null; + public void ResetOutputType() => __pbn__OutputType = null; + private string __pbn__OutputType; + + [global::ProtoBuf.ProtoMember(4, Name = @"options")] + public MethodOptions Options { get; set; } + + [global::ProtoBuf.ProtoMember(5, Name = @"client_streaming")] + [global::System.ComponentModel.DefaultValue(false)] + public bool ClientStreaming + { + get { return __pbn__ClientStreaming ?? false; } + set { __pbn__ClientStreaming = value; } + } + public bool ShouldSerializeClientStreaming() => __pbn__ClientStreaming != null; + public void ResetClientStreaming() => __pbn__ClientStreaming = null; + private bool? __pbn__ClientStreaming; + + [global::ProtoBuf.ProtoMember(6, Name = @"server_streaming")] + [global::System.ComponentModel.DefaultValue(false)] + public bool ServerStreaming + { + get { return __pbn__ServerStreaming ?? false; } + set { __pbn__ServerStreaming = value; } + } + public bool ShouldSerializeServerStreaming() => __pbn__ServerStreaming != null; + public void ResetServerStreaming() => __pbn__ServerStreaming = null; + private bool? 
__pbn__ServerStreaming; + + } + + [global::ProtoBuf.ProtoContract()] + public partial class FileOptions : global::ProtoBuf.IExtensible + { + private global::ProtoBuf.IExtension __pbn__extensionData; + global::ProtoBuf.IExtension global::ProtoBuf.IExtensible.GetExtensionObject(bool createIfMissing) + => global::ProtoBuf.Extensible.GetExtensionObject(ref __pbn__extensionData, createIfMissing); + + [global::ProtoBuf.ProtoMember(1, Name = @"java_package")] + [global::System.ComponentModel.DefaultValue("")] + public string JavaPackage + { + get { return __pbn__JavaPackage ?? ""; } + set { __pbn__JavaPackage = value; } + } + public bool ShouldSerializeJavaPackage() => __pbn__JavaPackage != null; + public void ResetJavaPackage() => __pbn__JavaPackage = null; + private string __pbn__JavaPackage; + + [global::ProtoBuf.ProtoMember(8, Name = @"java_outer_classname")] + [global::System.ComponentModel.DefaultValue("")] + public string JavaOuterClassname + { + get { return __pbn__JavaOuterClassname ?? ""; } + set { __pbn__JavaOuterClassname = value; } + } + public bool ShouldSerializeJavaOuterClassname() => __pbn__JavaOuterClassname != null; + public void ResetJavaOuterClassname() => __pbn__JavaOuterClassname = null; + private string __pbn__JavaOuterClassname; + + [global::ProtoBuf.ProtoMember(10, Name = @"java_multiple_files")] + [global::System.ComponentModel.DefaultValue(false)] + public bool JavaMultipleFiles + { + get { return __pbn__JavaMultipleFiles ?? false; } + set { __pbn__JavaMultipleFiles = value; } + } + public bool ShouldSerializeJavaMultipleFiles() => __pbn__JavaMultipleFiles != null; + public void ResetJavaMultipleFiles() => __pbn__JavaMultipleFiles = null; + private bool? __pbn__JavaMultipleFiles; + + [global::ProtoBuf.ProtoMember(20, Name = @"java_generate_equals_and_hash")] + [global::System.Obsolete] + public bool JavaGenerateEqualsAndHash + { + get { return __pbn__JavaGenerateEqualsAndHash.GetValueOrDefault(); } + set { __pbn__JavaGenerateEqualsAndHash = value; } + } + public bool ShouldSerializeJavaGenerateEqualsAndHash() => __pbn__JavaGenerateEqualsAndHash != null; + public void ResetJavaGenerateEqualsAndHash() => __pbn__JavaGenerateEqualsAndHash = null; + private bool? __pbn__JavaGenerateEqualsAndHash; + + [global::ProtoBuf.ProtoMember(27, Name = @"java_string_check_utf8")] + [global::System.ComponentModel.DefaultValue(false)] + public bool JavaStringCheckUtf8 + { + get { return __pbn__JavaStringCheckUtf8 ?? false; } + set { __pbn__JavaStringCheckUtf8 = value; } + } + public bool ShouldSerializeJavaStringCheckUtf8() => __pbn__JavaStringCheckUtf8 != null; + public void ResetJavaStringCheckUtf8() => __pbn__JavaStringCheckUtf8 = null; + private bool? __pbn__JavaStringCheckUtf8; + + [global::ProtoBuf.ProtoMember(9, Name = @"optimize_for")] + [global::System.ComponentModel.DefaultValue(OptimizeMode.Speed)] + public OptimizeMode OptimizeFor + { + get { return __pbn__OptimizeFor ?? OptimizeMode.Speed; } + set { __pbn__OptimizeFor = value; } + } + public bool ShouldSerializeOptimizeFor() => __pbn__OptimizeFor != null; + public void ResetOptimizeFor() => __pbn__OptimizeFor = null; + private OptimizeMode? __pbn__OptimizeFor; + + [global::ProtoBuf.ProtoMember(11, Name = @"go_package")] + [global::System.ComponentModel.DefaultValue("")] + public string GoPackage + { + get { return __pbn__GoPackage ?? 
""; } + set { __pbn__GoPackage = value; } + } + public bool ShouldSerializeGoPackage() => __pbn__GoPackage != null; + public void ResetGoPackage() => __pbn__GoPackage = null; + private string __pbn__GoPackage; + + [global::ProtoBuf.ProtoMember(16, Name = @"cc_generic_services")] + [global::System.ComponentModel.DefaultValue(false)] + public bool CcGenericServices + { + get { return __pbn__CcGenericServices ?? false; } + set { __pbn__CcGenericServices = value; } + } + public bool ShouldSerializeCcGenericServices() => __pbn__CcGenericServices != null; + public void ResetCcGenericServices() => __pbn__CcGenericServices = null; + private bool? __pbn__CcGenericServices; + + [global::ProtoBuf.ProtoMember(17, Name = @"java_generic_services")] + [global::System.ComponentModel.DefaultValue(false)] + public bool JavaGenericServices + { + get { return __pbn__JavaGenericServices ?? false; } + set { __pbn__JavaGenericServices = value; } + } + public bool ShouldSerializeJavaGenericServices() => __pbn__JavaGenericServices != null; + public void ResetJavaGenericServices() => __pbn__JavaGenericServices = null; + private bool? __pbn__JavaGenericServices; + + [global::ProtoBuf.ProtoMember(18, Name = @"py_generic_services")] + [global::System.ComponentModel.DefaultValue(false)] + public bool PyGenericServices + { + get { return __pbn__PyGenericServices ?? false; } + set { __pbn__PyGenericServices = value; } + } + public bool ShouldSerializePyGenericServices() => __pbn__PyGenericServices != null; + public void ResetPyGenericServices() => __pbn__PyGenericServices = null; + private bool? __pbn__PyGenericServices; + + [global::ProtoBuf.ProtoMember(23, Name = @"deprecated")] + [global::System.ComponentModel.DefaultValue(false)] + public bool Deprecated + { + get { return __pbn__Deprecated ?? false; } + set { __pbn__Deprecated = value; } + } + public bool ShouldSerializeDeprecated() => __pbn__Deprecated != null; + public void ResetDeprecated() => __pbn__Deprecated = null; + private bool? __pbn__Deprecated; + + [global::ProtoBuf.ProtoMember(31, Name = @"cc_enable_arenas")] + [global::System.ComponentModel.DefaultValue(false)] + public bool CcEnableArenas + { + get { return __pbn__CcEnableArenas ?? false; } + set { __pbn__CcEnableArenas = value; } + } + public bool ShouldSerializeCcEnableArenas() => __pbn__CcEnableArenas != null; + public void ResetCcEnableArenas() => __pbn__CcEnableArenas = null; + private bool? __pbn__CcEnableArenas; + + [global::ProtoBuf.ProtoMember(36, Name = @"objc_class_prefix")] + [global::System.ComponentModel.DefaultValue("")] + public string ObjcClassPrefix + { + get { return __pbn__ObjcClassPrefix ?? ""; } + set { __pbn__ObjcClassPrefix = value; } + } + public bool ShouldSerializeObjcClassPrefix() => __pbn__ObjcClassPrefix != null; + public void ResetObjcClassPrefix() => __pbn__ObjcClassPrefix = null; + private string __pbn__ObjcClassPrefix; + + [global::ProtoBuf.ProtoMember(37, Name = @"csharp_namespace")] + [global::System.ComponentModel.DefaultValue("")] + public string CsharpNamespace + { + get { return __pbn__CsharpNamespace ?? ""; } + set { __pbn__CsharpNamespace = value; } + } + public bool ShouldSerializeCsharpNamespace() => __pbn__CsharpNamespace != null; + public void ResetCsharpNamespace() => __pbn__CsharpNamespace = null; + private string __pbn__CsharpNamespace; + + [global::ProtoBuf.ProtoMember(39, Name = @"swift_prefix")] + [global::System.ComponentModel.DefaultValue("")] + public string SwiftPrefix + { + get { return __pbn__SwiftPrefix ?? 
""; } + set { __pbn__SwiftPrefix = value; } + } + public bool ShouldSerializeSwiftPrefix() => __pbn__SwiftPrefix != null; + public void ResetSwiftPrefix() => __pbn__SwiftPrefix = null; + private string __pbn__SwiftPrefix; + + [global::ProtoBuf.ProtoMember(40, Name = @"php_class_prefix")] + [global::System.ComponentModel.DefaultValue("")] + public string PhpClassPrefix + { + get { return __pbn__PhpClassPrefix ?? ""; } + set { __pbn__PhpClassPrefix = value; } + } + public bool ShouldSerializePhpClassPrefix() => __pbn__PhpClassPrefix != null; + public void ResetPhpClassPrefix() => __pbn__PhpClassPrefix = null; + private string __pbn__PhpClassPrefix; + + [global::ProtoBuf.ProtoMember(999, Name = @"uninterpreted_option")] + public global::System.Collections.Generic.List UninterpretedOptions { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoContract()] + public enum OptimizeMode + { + [global::ProtoBuf.ProtoEnum(Name = @"SPEED")] + Speed = 1, + [global::ProtoBuf.ProtoEnum(Name = @"CODE_SIZE")] + CodeSize = 2, + [global::ProtoBuf.ProtoEnum(Name = @"LITE_RUNTIME")] + LiteRuntime = 3, + } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class MessageOptions : global::ProtoBuf.IExtensible + { + private global::ProtoBuf.IExtension __pbn__extensionData; + global::ProtoBuf.IExtension global::ProtoBuf.IExtensible.GetExtensionObject(bool createIfMissing) + => global::ProtoBuf.Extensible.GetExtensionObject(ref __pbn__extensionData, createIfMissing); + + [global::ProtoBuf.ProtoMember(1, Name = @"message_set_wire_format")] + [global::System.ComponentModel.DefaultValue(false)] + public bool MessageSetWireFormat + { + get { return __pbn__MessageSetWireFormat ?? false; } + set { __pbn__MessageSetWireFormat = value; } + } + public bool ShouldSerializeMessageSetWireFormat() => __pbn__MessageSetWireFormat != null; + public void ResetMessageSetWireFormat() => __pbn__MessageSetWireFormat = null; + private bool? __pbn__MessageSetWireFormat; + + [global::ProtoBuf.ProtoMember(2, Name = @"no_standard_descriptor_accessor")] + [global::System.ComponentModel.DefaultValue(false)] + public bool NoStandardDescriptorAccessor + { + get { return __pbn__NoStandardDescriptorAccessor ?? false; } + set { __pbn__NoStandardDescriptorAccessor = value; } + } + public bool ShouldSerializeNoStandardDescriptorAccessor() => __pbn__NoStandardDescriptorAccessor != null; + public void ResetNoStandardDescriptorAccessor() => __pbn__NoStandardDescriptorAccessor = null; + private bool? __pbn__NoStandardDescriptorAccessor; + + [global::ProtoBuf.ProtoMember(3, Name = @"deprecated")] + [global::System.ComponentModel.DefaultValue(false)] + public bool Deprecated + { + get { return __pbn__Deprecated ?? false; } + set { __pbn__Deprecated = value; } + } + public bool ShouldSerializeDeprecated() => __pbn__Deprecated != null; + public void ResetDeprecated() => __pbn__Deprecated = null; + private bool? __pbn__Deprecated; + + [global::ProtoBuf.ProtoMember(7, Name = @"map_entry")] + public bool MapEntry + { + get { return __pbn__MapEntry.GetValueOrDefault(); } + set { __pbn__MapEntry = value; } + } + public bool ShouldSerializeMapEntry() => __pbn__MapEntry != null; + public void ResetMapEntry() => __pbn__MapEntry = null; + private bool? 
__pbn__MapEntry; + + [global::ProtoBuf.ProtoMember(999, Name = @"uninterpreted_option")] + public global::System.Collections.Generic.List UninterpretedOptions { get; } = new global::System.Collections.Generic.List(); + + } + + [global::ProtoBuf.ProtoContract()] + public partial class FieldOptions : global::ProtoBuf.IExtensible + { + private global::ProtoBuf.IExtension __pbn__extensionData; + global::ProtoBuf.IExtension global::ProtoBuf.IExtensible.GetExtensionObject(bool createIfMissing) + => global::ProtoBuf.Extensible.GetExtensionObject(ref __pbn__extensionData, createIfMissing); + + [global::ProtoBuf.ProtoMember(1, Name = @"ctype")] + [global::System.ComponentModel.DefaultValue(CType.String)] + public CType Ctype + { + get { return __pbn__Ctype ?? CType.String; } + set { __pbn__Ctype = value; } + } + public bool ShouldSerializeCtype() => __pbn__Ctype != null; + public void ResetCtype() => __pbn__Ctype = null; + private CType? __pbn__Ctype; + + [global::ProtoBuf.ProtoMember(2, Name = @"packed")] + public bool Packed + { + get { return __pbn__Packed.GetValueOrDefault(); } + set { __pbn__Packed = value; } + } + public bool ShouldSerializePacked() => __pbn__Packed != null; + public void ResetPacked() => __pbn__Packed = null; + private bool? __pbn__Packed; + + [global::ProtoBuf.ProtoMember(6, Name = @"jstype")] + [global::System.ComponentModel.DefaultValue(JSType.JsNormal)] + public JSType Jstype + { + get { return __pbn__Jstype ?? JSType.JsNormal; } + set { __pbn__Jstype = value; } + } + public bool ShouldSerializeJstype() => __pbn__Jstype != null; + public void ResetJstype() => __pbn__Jstype = null; + private JSType? __pbn__Jstype; + + [global::ProtoBuf.ProtoMember(5, Name = @"lazy")] + [global::System.ComponentModel.DefaultValue(false)] + public bool Lazy + { + get { return __pbn__Lazy ?? false; } + set { __pbn__Lazy = value; } + } + public bool ShouldSerializeLazy() => __pbn__Lazy != null; + public void ResetLazy() => __pbn__Lazy = null; + private bool? __pbn__Lazy; + + [global::ProtoBuf.ProtoMember(3, Name = @"deprecated")] + [global::System.ComponentModel.DefaultValue(false)] + public bool Deprecated + { + get { return __pbn__Deprecated ?? false; } + set { __pbn__Deprecated = value; } + } + public bool ShouldSerializeDeprecated() => __pbn__Deprecated != null; + public void ResetDeprecated() => __pbn__Deprecated = null; + private bool? __pbn__Deprecated; + + [global::ProtoBuf.ProtoMember(10, Name = @"weak")] + [global::System.ComponentModel.DefaultValue(false)] + public bool Weak + { + get { return __pbn__Weak ?? false; } + set { __pbn__Weak = value; } + } + public bool ShouldSerializeWeak() => __pbn__Weak != null; + public void ResetWeak() => __pbn__Weak = null; + private bool? 
__pbn__Weak; + + [global::ProtoBuf.ProtoMember(999, Name = @"uninterpreted_option")] + public global::System.Collections.Generic.List UninterpretedOptions { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoContract()] + public enum CType + { + [global::ProtoBuf.ProtoEnum(Name = @"STRING")] + String = 0, + [global::ProtoBuf.ProtoEnum(Name = @"CORD")] + Cord = 1, + [global::ProtoBuf.ProtoEnum(Name = @"STRING_PIECE")] + StringPiece = 2, + } + + [global::ProtoBuf.ProtoContract()] + public enum JSType + { + [global::ProtoBuf.ProtoEnum(Name = @"JS_NORMAL")] + JsNormal = 0, + [global::ProtoBuf.ProtoEnum(Name = @"JS_STRING")] + JsString = 1, + [global::ProtoBuf.ProtoEnum(Name = @"JS_NUMBER")] + JsNumber = 2, + } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class OneofOptions : global::ProtoBuf.IExtensible + { + private global::ProtoBuf.IExtension __pbn__extensionData; + global::ProtoBuf.IExtension global::ProtoBuf.IExtensible.GetExtensionObject(bool createIfMissing) + => global::ProtoBuf.Extensible.GetExtensionObject(ref __pbn__extensionData, createIfMissing); + + [global::ProtoBuf.ProtoMember(999, Name = @"uninterpreted_option")] + public global::System.Collections.Generic.List UninterpretedOptions { get; } = new global::System.Collections.Generic.List(); + + } + + [global::ProtoBuf.ProtoContract()] + public partial class EnumOptions : global::ProtoBuf.IExtensible + { + private global::ProtoBuf.IExtension __pbn__extensionData; + global::ProtoBuf.IExtension global::ProtoBuf.IExtensible.GetExtensionObject(bool createIfMissing) + => global::ProtoBuf.Extensible.GetExtensionObject(ref __pbn__extensionData, createIfMissing); + + [global::ProtoBuf.ProtoMember(2, Name = @"allow_alias")] + public bool AllowAlias + { + get { return __pbn__AllowAlias.GetValueOrDefault(); } + set { __pbn__AllowAlias = value; } + } + public bool ShouldSerializeAllowAlias() => __pbn__AllowAlias != null; + public void ResetAllowAlias() => __pbn__AllowAlias = null; + private bool? __pbn__AllowAlias; + + [global::ProtoBuf.ProtoMember(3, Name = @"deprecated")] + [global::System.ComponentModel.DefaultValue(false)] + public bool Deprecated + { + get { return __pbn__Deprecated ?? false; } + set { __pbn__Deprecated = value; } + } + public bool ShouldSerializeDeprecated() => __pbn__Deprecated != null; + public void ResetDeprecated() => __pbn__Deprecated = null; + private bool? __pbn__Deprecated; + + [global::ProtoBuf.ProtoMember(999, Name = @"uninterpreted_option")] + public global::System.Collections.Generic.List UninterpretedOptions { get; } = new global::System.Collections.Generic.List(); + + } + + [global::ProtoBuf.ProtoContract()] + public partial class EnumValueOptions : global::ProtoBuf.IExtensible + { + private global::ProtoBuf.IExtension __pbn__extensionData; + global::ProtoBuf.IExtension global::ProtoBuf.IExtensible.GetExtensionObject(bool createIfMissing) + => global::ProtoBuf.Extensible.GetExtensionObject(ref __pbn__extensionData, createIfMissing); + + [global::ProtoBuf.ProtoMember(1, Name = @"deprecated")] + [global::System.ComponentModel.DefaultValue(false)] + public bool Deprecated + { + get { return __pbn__Deprecated ?? false; } + set { __pbn__Deprecated = value; } + } + public bool ShouldSerializeDeprecated() => __pbn__Deprecated != null; + public void ResetDeprecated() => __pbn__Deprecated = null; + private bool? 
__pbn__Deprecated; + + [global::ProtoBuf.ProtoMember(999, Name = @"uninterpreted_option")] + public global::System.Collections.Generic.List UninterpretedOptions { get; } = new global::System.Collections.Generic.List(); + + } + + [global::ProtoBuf.ProtoContract()] + public partial class ServiceOptions : global::ProtoBuf.IExtensible + { + private global::ProtoBuf.IExtension __pbn__extensionData; + global::ProtoBuf.IExtension global::ProtoBuf.IExtensible.GetExtensionObject(bool createIfMissing) + => global::ProtoBuf.Extensible.GetExtensionObject(ref __pbn__extensionData, createIfMissing); + + [global::ProtoBuf.ProtoMember(33, Name = @"deprecated")] + [global::System.ComponentModel.DefaultValue(false)] + public bool Deprecated + { + get { return __pbn__Deprecated ?? false; } + set { __pbn__Deprecated = value; } + } + public bool ShouldSerializeDeprecated() => __pbn__Deprecated != null; + public void ResetDeprecated() => __pbn__Deprecated = null; + private bool? __pbn__Deprecated; + + [global::ProtoBuf.ProtoMember(999, Name = @"uninterpreted_option")] + public global::System.Collections.Generic.List UninterpretedOptions { get; } = new global::System.Collections.Generic.List(); + + } + + [global::ProtoBuf.ProtoContract()] + public partial class MethodOptions : global::ProtoBuf.IExtensible + { + private global::ProtoBuf.IExtension __pbn__extensionData; + global::ProtoBuf.IExtension global::ProtoBuf.IExtensible.GetExtensionObject(bool createIfMissing) + => global::ProtoBuf.Extensible.GetExtensionObject(ref __pbn__extensionData, createIfMissing); + + [global::ProtoBuf.ProtoMember(33, Name = @"deprecated")] + [global::System.ComponentModel.DefaultValue(false)] + public bool Deprecated + { + get { return __pbn__Deprecated ?? false; } + set { __pbn__Deprecated = value; } + } + public bool ShouldSerializeDeprecated() => __pbn__Deprecated != null; + public void ResetDeprecated() => __pbn__Deprecated = null; + private bool? __pbn__Deprecated; + + [global::ProtoBuf.ProtoMember(34)] + [global::System.ComponentModel.DefaultValue(IdempotencyLevel.IdempotencyUnknown)] + public IdempotencyLevel idempotency_level + { + get { return __pbn__idempotency_level ?? IdempotencyLevel.IdempotencyUnknown; } + set { __pbn__idempotency_level = value; } + } + public bool ShouldSerializeidempotency_level() => __pbn__idempotency_level != null; + public void Resetidempotency_level() => __pbn__idempotency_level = null; + private IdempotencyLevel? __pbn__idempotency_level; + + [global::ProtoBuf.ProtoMember(999, Name = @"uninterpreted_option")] + public global::System.Collections.Generic.List UninterpretedOptions { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoContract()] + public enum IdempotencyLevel + { + [global::ProtoBuf.ProtoEnum(Name = @"IDEMPOTENCY_UNKNOWN")] + IdempotencyUnknown = 0, + [global::ProtoBuf.ProtoEnum(Name = @"NO_SIDE_EFFECTS")] + NoSideEffects = 1, + [global::ProtoBuf.ProtoEnum(Name = @"IDEMPOTENT")] + Idempotent = 2, + } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class UninterpretedOption + { + [global::ProtoBuf.ProtoMember(2, Name = @"name")] + public global::System.Collections.Generic.List Names { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoMember(3, Name = @"identifier_value")] + [global::System.ComponentModel.DefaultValue("")] + public string IdentifierValue + { + get { return __pbn__IdentifierValue ?? 
""; } + set { __pbn__IdentifierValue = value; } + } + public bool ShouldSerializeIdentifierValue() => __pbn__IdentifierValue != null; + public void ResetIdentifierValue() => __pbn__IdentifierValue = null; + private string __pbn__IdentifierValue; + + [global::ProtoBuf.ProtoMember(4, Name = @"positive_int_value")] + public ulong PositiveIntValue + { + get { return __pbn__PositiveIntValue.GetValueOrDefault(); } + set { __pbn__PositiveIntValue = value; } + } + public bool ShouldSerializePositiveIntValue() => __pbn__PositiveIntValue != null; + public void ResetPositiveIntValue() => __pbn__PositiveIntValue = null; + private ulong? __pbn__PositiveIntValue; + + [global::ProtoBuf.ProtoMember(5, Name = @"negative_int_value")] + public long NegativeIntValue + { + get { return __pbn__NegativeIntValue.GetValueOrDefault(); } + set { __pbn__NegativeIntValue = value; } + } + public bool ShouldSerializeNegativeIntValue() => __pbn__NegativeIntValue != null; + public void ResetNegativeIntValue() => __pbn__NegativeIntValue = null; + private long? __pbn__NegativeIntValue; + + [global::ProtoBuf.ProtoMember(6, Name = @"double_value")] + public double DoubleValue + { + get { return __pbn__DoubleValue.GetValueOrDefault(); } + set { __pbn__DoubleValue = value; } + } + public bool ShouldSerializeDoubleValue() => __pbn__DoubleValue != null; + public void ResetDoubleValue() => __pbn__DoubleValue = null; + private double? __pbn__DoubleValue; + + [global::ProtoBuf.ProtoMember(7, Name = @"string_value")] + public byte[] StringValue + { + get { return __pbn__StringValue; } + set { __pbn__StringValue = value; } + } + public bool ShouldSerializeStringValue() => __pbn__StringValue != null; + public void ResetStringValue() => __pbn__StringValue = null; + private byte[] __pbn__StringValue; + + [global::ProtoBuf.ProtoMember(8, Name = @"aggregate_value")] + [global::System.ComponentModel.DefaultValue("")] + public string AggregateValue + { + get { return __pbn__AggregateValue ?? ""; } + set { __pbn__AggregateValue = value; } + } + public bool ShouldSerializeAggregateValue() => __pbn__AggregateValue != null; + public void ResetAggregateValue() => __pbn__AggregateValue = null; + private string __pbn__AggregateValue; + + [global::ProtoBuf.ProtoContract()] + public partial class NamePart + { + [global::ProtoBuf.ProtoMember(1, IsRequired = true)] + public string name_part { get; set; } + + [global::ProtoBuf.ProtoMember(2, Name = @"is_extension", IsRequired = true)] + public bool IsExtension { get; set; } + + } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class SourceCodeInfo + { + [global::ProtoBuf.ProtoMember(1, Name = @"location")] + public global::System.Collections.Generic.List Locations { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoContract()] + public partial class Location + { + [global::ProtoBuf.ProtoMember(1, Name = @"path", IsPacked = true)] + public int[] Paths { get; set; } + + [global::ProtoBuf.ProtoMember(2, Name = @"span", IsPacked = true)] + public int[] Spans { get; set; } + + [global::ProtoBuf.ProtoMember(3, Name = @"leading_comments")] + [global::System.ComponentModel.DefaultValue("")] + public string LeadingComments + { + get { return __pbn__LeadingComments ?? 
""; } + set { __pbn__LeadingComments = value; } + } + public bool ShouldSerializeLeadingComments() => __pbn__LeadingComments != null; + public void ResetLeadingComments() => __pbn__LeadingComments = null; + private string __pbn__LeadingComments; + + [global::ProtoBuf.ProtoMember(4, Name = @"trailing_comments")] + [global::System.ComponentModel.DefaultValue("")] + public string TrailingComments + { + get { return __pbn__TrailingComments ?? ""; } + set { __pbn__TrailingComments = value; } + } + public bool ShouldSerializeTrailingComments() => __pbn__TrailingComments != null; + public void ResetTrailingComments() => __pbn__TrailingComments = null; + private string __pbn__TrailingComments; + + [global::ProtoBuf.ProtoMember(6, Name = @"leading_detached_comments")] + public global::System.Collections.Generic.List LeadingDetachedComments { get; } = new global::System.Collections.Generic.List(); + + } + + } + + [global::ProtoBuf.ProtoContract()] + public partial class GeneratedCodeInfo + { + [global::ProtoBuf.ProtoMember(1, Name = @"annotation")] + public global::System.Collections.Generic.List Annotations { get; } = new global::System.Collections.Generic.List(); + + [global::ProtoBuf.ProtoContract()] + public partial class Annotation + { + [global::ProtoBuf.ProtoMember(1, Name = @"path", IsPacked = true)] + public int[] Paths { get; set; } + + [global::ProtoBuf.ProtoMember(2, Name = @"source_file")] + [global::System.ComponentModel.DefaultValue("")] + public string SourceFile + { + get { return __pbn__SourceFile ?? ""; } + set { __pbn__SourceFile = value; } + } + public bool ShouldSerializeSourceFile() => __pbn__SourceFile != null; + public void ResetSourceFile() => __pbn__SourceFile = null; + private string __pbn__SourceFile; + + [global::ProtoBuf.ProtoMember(3, Name = @"begin")] + public int Begin + { + get { return __pbn__Begin.GetValueOrDefault(); } + set { __pbn__Begin = value; } + } + public bool ShouldSerializeBegin() => __pbn__Begin != null; + public void ResetBegin() => __pbn__Begin = null; + private int? __pbn__Begin; + + [global::ProtoBuf.ProtoMember(4, Name = @"end")] + public int End + { + get { return __pbn__End.GetValueOrDefault(); } + set { __pbn__End = value; } + } + public bool ShouldSerializeEnd() => __pbn__End != null; + public void ResetEnd() => __pbn__End = null; + private int? 
__pbn__End; + + } + + } + +} + +#pragma warning restore CS1591, CS0612, CS3021 diff --git a/Editor/protobuf-net.Reflection/Descriptor.cs.meta b/Editor/protobuf-net.Reflection/Descriptor.cs.meta new file mode 100644 index 0000000..fe22734 --- /dev/null +++ b/Editor/protobuf-net.Reflection/Descriptor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a97ff5a986e394a77a730ff22b235b42 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/protobuf-net.Reflection/NameNormalizer.cs b/Editor/protobuf-net.Reflection/NameNormalizer.cs new file mode 100644 index 0000000..f675775 --- /dev/null +++ b/Editor/protobuf-net.Reflection/NameNormalizer.cs @@ -0,0 +1,236 @@ +using Google.Protobuf.Reflection; +using System; +using System.Collections.Generic; +using System.Text.RegularExpressions; + + +namespace ProtoBuf.Reflection +{ + + internal class ParserException : Exception + { + public int ColumnNumber { get; } + public int LineNumber { get; } + public string File { get; } + public string Text { get; } + public string LineContents { get; } + public bool IsError { get; } + internal ParserException(Token token, string message, bool isError) + : base(message ?? "error") + { + ColumnNumber = token.ColumnNumber; + LineNumber = token.LineNumber; + File = token.File; + LineContents = token.LineContents; + Text = token.Value ?? ""; + IsError = isError; + } + } + /// + /// Provides general purpose name suggestions + /// + public abstract class NameNormalizer + { + private class NullNormalizer : NameNormalizer + { + protected override string GetName(string identifier) => identifier; + /// + /// Suggest a name with idiomatic pluralization + /// + public override string Pluralize(string identifier) => identifier; + } + private class DefaultNormalizer : NameNormalizer + { + protected override string GetName(string identifier) => AutoCapitalize(identifier); + /// + /// Suggest a name with idiomatic pluralization + /// + public override string Pluralize(string identifier) => AutoPluralize(identifier); + } + /// + /// Suggest a name with idiomatic name capitalization + /// + public static string AutoCapitalize(string identifier) + { + if (string.IsNullOrEmpty(identifier)) return identifier; + // if all upper-case, make proper-case + if (Regex.IsMatch(identifier, @"^[_A-Z0-9]*$")) + { + return Regex.Replace(identifier, @"(^|_)([A-Z0-9])([A-Z0-9]*)", + match => match.Groups[2].Value.ToUpperInvariant() + match.Groups[3].Value.ToLowerInvariant()); + } + // if all lower-case, make proper case + if (Regex.IsMatch(identifier, @"^[_a-z0-9]*$")) + { + return Regex.Replace(identifier, @"(^|_)([a-z0-9])([a-z0-9]*)", + match => match.Groups[2].Value.ToUpperInvariant() + match.Groups[3].Value.ToLowerInvariant()); + } + // just remove underscores - leave their chosen casing alone + return identifier.Replace("_", ""); + } + public static string AutoCapitalizeFullName(string fullName) + { + var names = fullName.Split('.'); + var s = ""; + for (int i = 0; i < names.Length; i++) + { + if (i == names.Length - 1) + { + s += $"{AutoCapitalize(names[i])}"; + } + else + { + s += $"{AutoCapitalize(names[i])}."; + } + } + return s; + } + /// + /// Suggest a name with idiomatic pluralization + /// + protected static string AutoPluralize(string identifier) + { + // horribly Anglo-centric and only covers common cases; but: is swappable + + if (string.IsNullOrEmpty(identifier) || identifier.Length == 
1) return identifier; + + if (identifier.EndsWith("ss") || identifier.EndsWith("o")) return identifier + "es"; + if (identifier.EndsWith("is") && identifier.Length > 2) return identifier.Substring(0, identifier.Length - 2) + "es"; + + if (identifier.EndsWith("s")) return identifier; // misses some things (bus => buses), but: might already be pluralized + + if (identifier.EndsWith("y") && identifier.Length > 2) + { // identity => identities etc + switch (identifier[identifier.Length - 2]) + { + case 'a': + case 'e': + case 'i': + case 'o': + case 'u': + break; // only for consonant prefix + default: + return identifier.Substring(0, identifier.Length - 1) + "ies"; + } + } + return identifier + "s"; + } + /// + /// Name normalizer with default protobuf-net behaviour, using .NET idioms + /// + public static NameNormalizer Default { get; } = new DefaultNormalizer(); + /// + /// Name normalizer that passes through all identifiers without any changes + /// + public static NameNormalizer Null { get; } = new NullNormalizer(); + /// + /// Suggest a normalized identifier + /// + protected abstract string GetName(string identifier); + /// + /// Suggest a name with idiomatic pluralization + /// + public abstract string Pluralize(string identifier); + + /// + /// Suggest a normalized identifier + /// + public virtual string GetName(FileDescriptorProto definition) + { + var ns = definition?.Options?.GetOptions()?.Namespace; + if (!string.IsNullOrWhiteSpace(ns)) return ns; + ns = definition.Options?.CsharpNamespace; + if (string.IsNullOrWhiteSpace(ns)) ns = GetName(definition.Package); + + return string.IsNullOrWhiteSpace(ns) ? null : ns; + } + /// + /// Suggest a normalized identifier + /// + public virtual string GetName(DescriptorProto definition) + { + var name = definition?.Options?.GetOptions()?.Name; + if (!string.IsNullOrWhiteSpace(name)) return name; + return GetName(definition.Parent as DescriptorProto, GetName(definition.Name), definition.Name, false); + } + /// + /// Suggest a normalized identifier + /// + public virtual string GetName(EnumDescriptorProto definition) + { + var name = definition?.Options?.GetOptions()?.Name; + if (!string.IsNullOrWhiteSpace(name)) return name; + return GetName(definition.Parent as DescriptorProto, GetName(definition.Name), definition.Name, false); + } + /// + /// Suggest a normalized identifier + /// + public virtual string GetName(EnumValueDescriptorProto definition) + { + var name = definition?.Options?.GetOptions()?.Name; + if (!string.IsNullOrWhiteSpace(name)) return name; + return AutoCapitalize(definition.Name); + } + /// + /// Suggest a normalized identifier + /// + public virtual string GetName(FieldDescriptorProto definition) + { + var name = definition?.Options?.GetOptions()?.Name; + if (!string.IsNullOrWhiteSpace(name)) return name; + var preferred = GetName(definition.Name); + if (definition.label == FieldDescriptorProto.Label.LabelRepeated) + { + preferred = Pluralize(preferred); + } + return GetName(definition.Parent as DescriptorProto, preferred, definition.Name, true); + } + /// + /// Obtain a set of all names defined for a message + /// + protected HashSet BuildConflicts(DescriptorProto parent, bool includeDescendents) + { + var conflicts = new HashSet(); + if (parent != null) + { + conflicts.Add(GetName(parent)); + if (includeDescendents) + { + foreach (var type in parent.NestedTypes) + { + conflicts.Add(GetName(type)); + } + foreach (var type in parent.EnumTypes) + { + conflicts.Add(GetName(type)); + } + } + } + return conflicts; + } + /// + 
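+// Illustrative results of the helpers above: AutoCapitalize("user_id") and AutoCapitalize("USER_ID") both yield "UserId",
+// while mixed-case input only has its underscores removed; AutoPluralize("identity") yields "identities" and AutoPluralize("hero") yields "heroes".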
/// Get the preferred name for an element + /// + protected virtual string GetName(DescriptorProto parent, string preferred, string fallback, bool includeDescendents) + { + var conflicts = BuildConflicts(parent, includeDescendents); + + if (!conflicts.Contains(preferred)) return preferred; + if (!conflicts.Contains(fallback)) return fallback; + + var attempt = preferred + "Value"; + if (!conflicts.Contains(attempt)) return attempt; + + attempt = fallback + "Value"; + if (!conflicts.Contains(attempt)) return attempt; + + int i = 1; + while (true) + { + attempt = preferred + i.ToString(); + if (!conflicts.Contains(attempt)) return attempt; + i++; + } + } + } + +}
diff --git a/Editor/protobuf-net.Reflection/NameNormalizer.cs.meta b/Editor/protobuf-net.Reflection/NameNormalizer.cs.meta new file mode 100644 index 0000000..4deb5fe --- /dev/null +++ b/Editor/protobuf-net.Reflection/NameNormalizer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: acacb9da00b544a9782cc2e20c7cf13a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant:
diff --git a/Editor/protobuf-net.Reflection/Parsers.cs b/Editor/protobuf-net.Reflection/Parsers.cs new file mode 100644 index 0000000..53504c4 --- /dev/null +++ b/Editor/protobuf-net.Reflection/Parsers.cs @@ -0,0 +1,2841 @@ +using Google.Protobuf.Reflection; +using ProtoBuf; +using ProtoBuf.Reflection; +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Text; +using System.Text.RegularExpressions; + +namespace Google.Protobuf.Reflection +{ +#pragma warning disable CS1591 + + interface IType + { + IType Parent { get; } + string FullyQualifiedName { get; } + + IType Find(string name); + } + partial class FileDescriptorSet + { + internal const string Namespace = ".google.protobuf."; + public Func<string, bool> ImportValidator { get; set; } + + internal List<string> importPaths = new List<string>(); + public void AddImportPath(string path) + { + importPaths.Add(path); + } + public Error[] GetErrors() => Error.GetArray(Errors); + internal List<Error> Errors { get; } = new List<Error>(); + + public bool Add(string name, bool includeInOutput, TextReader source = null) + { + if (string.IsNullOrWhiteSpace(name)) + throw new ArgumentNullException(nameof(name)); + if (Path.IsPathRooted(name) || name.Contains("..")) + throw new ArgumentException("Paths should be relative to the import paths, not rooted", nameof(name)); + FileDescriptorProto descriptor; + if (TryResolve(name, out descriptor)) + { + if (includeInOutput) descriptor.IncludeInOutput = true; + return true; // already exists, that counts as success + } + + using (var reader = source ?? 
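+// no explicit reader supplied: Open(name) below resolves the file against the registered import paths and returns null if it cannot be found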
Open(name)) + { + if (reader == null) return false; // not found + + descriptor = new FileDescriptorProto + { + Name = name, + IncludeInOutput = includeInOutput + }; + Files.Add(descriptor); + + descriptor.Parse(reader, Errors, name); + return true; + } + } + + private TextReader Open(string name) + { + var found = FindFile(name); + if (found == null) return null; + return File.OpenText(found); + } + string FindFile(string file) + { + foreach (var path in importPaths) + { + var rel = Path.Combine(path, file); + if (File.Exists(rel)) return rel; + } + return null; + } + + bool TryResolve(string name, out FileDescriptorProto descriptor) + { + descriptor = Files.FirstOrDefault(x => string.Equals(x.Name, name, StringComparison.OrdinalIgnoreCase)); + return descriptor != null; + } + + private void ApplyImports() + { + bool didSomething; + do + { + didSomething = false; + var file = Files.FirstOrDefault(x => x.HasPendingImports); + if (file != null) + { + // note that GetImports clears the flag + foreach (var import in file.GetImports(true)) + { + if (!(ImportValidator?.Invoke(import.Path) ?? true)) + { + Errors.Error(import.Token, $"import of {import.Path} is disallowed"); + } + else if (Add(import.Path, false)) + { + didSomething = true; + } + else + { + Errors.Error(import.Token, $"unable to find: '{import.Path}'"); + } + } + } + } while (didSomething); + } + + public void Process() + { + ApplyImports(); + foreach (var file in Files) + { + using (var ctx = new ParserContext(file, null, Errors)) + { + file.BuildTypeHierarchy(this, ctx); + } + } + foreach (var file in Files) + { + using (var ctx = new ParserContext(file, null, Errors)) + { + file.ResolveTypes(ctx, false); + } + } + foreach (var file in Files) + { + using (var ctx = new ParserContext(file, null, Errors)) + { + file.ResolveTypes(ctx, true); + } + } + } + + public T Serialize(Func customSerializer, bool includeImports, object state = null) + { + T result; + if (includeImports || Files.All(x => x.IncludeInOutput)) + { + result = customSerializer(this, state); + } + else + { + var snapshort = Files.ToArray(); + Files.RemoveAll(x => !x.IncludeInOutput); + result = customSerializer(this, state); + Files.Clear(); + Files.AddRange(snapshort); + } + return result; + } + + public void Serialize(Stream destination, bool includeImports) + { + Serialize((s,o) => { Serializer.Serialize((Stream)o, s); return true; }, includeImports, destination); + } + + internal FileDescriptorProto GetFile(string path) + // try full match first, then name-only match + => Files.FirstOrDefault(x => string.Equals(x.Name, path, StringComparison.OrdinalIgnoreCase)); + } + partial class DescriptorProto : ISchemaObject, IType, IMessage + { + public static byte[] GetExtensionData(IExtensible obj) + { + var ext = obj?.GetExtensionObject(false); + int len; + if (ext == null || (len = ext.GetLength()) == 0) return null; + var s = ext.BeginQuery(); + try + { + if (s is MemoryStream) return ((MemoryStream)s).ToArray(); + + byte[] buffer = new byte[len]; + int offset = 0, read; + while ((read = s.Read(buffer, offset, len)) > 0) + { + offset += read; + len -= read; + } + if (len != 0) throw new EndOfStreamException(); + return buffer; + } + finally + { + ext.EndQuery(s); + } + } + public static void SetExtensionData(IExtensible obj, byte[] data) + { + if (obj == null || data == null || data.Length == 0) return; + var ext = obj.GetExtensionObject(true); + (ext as IExtensionResettable)?.Reset(); + var s = ext.BeginAppend(); + try + { + s.Write(data, 0, data.Length); + 
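+// commit the appended bytes to the extension data ('true' keeps them); the catch below passes 'false' so a failed write is discarded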
ext.EndAppend(s, true); + } + catch + { + ext.EndAppend(s, false); + throw; + } + } + + public override string ToString() => Name; + internal IType Parent { get; set; } + IType IType.Parent => Parent; + string IType.FullyQualifiedName => FullyQualifiedName; + IType IType.Find(string name) + { + return (IType)NestedTypes.FirstOrDefault(x => string.Equals(x.Name, name, StringComparison.OrdinalIgnoreCase)) + ?? (IType)EnumTypes.FirstOrDefault(x => string.Equals(x.Name, name, StringComparison.OrdinalIgnoreCase)); + } + internal string FullyQualifiedName { get; set; } + + List IMessage.Types => NestedTypes; + + internal int MaxField => (Options?.MessageSetWireFormat == true) ? int.MaxValue : FieldDescriptorProto.DefaultMaxField; + int IMessage.MaxField => MaxField; + + + internal static bool TryParse(ParserContext ctx, IHazNames parent, out DescriptorProto obj) + { + var name = ctx.Tokens.Consume(TokenType.AlphaNumeric); + ctx.CheckNames(parent, name, ctx.Tokens.Previous); + if (ctx.TryReadObject(out obj)) + { + obj.Name = name; + return true; + } + return false; + } + void ISchemaObject.ReadOne(ParserContext ctx) + { + var tokens = ctx.Tokens; + if (tokens.ConsumeIf(TokenType.AlphaNumeric, "message")) + { + DescriptorProto obj; + if (DescriptorProto.TryParse(ctx, this, out obj)) + { + NestedTypes.Add(obj); + } + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "enum")) + { + EnumDescriptorProto obj; + if (EnumDescriptorProto.TryParse(ctx, this, out obj)) + EnumTypes.Add(obj); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "option")) + { + Options = ctx.ParseOptionStatement(Options, this); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "reserved")) + { + ParseReservedRanges(ctx); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "extensions")) + { + ParseExtensionRange(ctx); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "extend")) + { + FieldDescriptorProto.ParseExtensions(ctx, this); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "oneof")) + { + OneofDescriptorProto.Parse(ctx, this); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "map")) + { + ParseMap(ctx); + } + else + { + FieldDescriptorProto obj; + if (FieldDescriptorProto.TryParse(ctx, this, false, out obj)) + Fields.Add(obj); + } + } + + + private void ParseMap(ParserContext ctx) + { + ctx.AbortState = AbortState.Statement; + var tokens = ctx.Tokens; + tokens.Consume(TokenType.Symbol, "<"); + var keyName = tokens.Consume(TokenType.AlphaNumeric); + var keyToken = tokens.Previous; + FieldDescriptorProto.Type keyType; + if (FieldDescriptorProto.TryIdentifyType(keyName, out keyType)) + { + keyName = null; + } + switch (keyType) + { + case 0: + case FieldDescriptorProto.Type.TypeBytes: + case FieldDescriptorProto.Type.TypeMessage: + case FieldDescriptorProto.Type.TypeGroup: + case FieldDescriptorProto.Type.TypeFloat: + case FieldDescriptorProto.Type.TypeDouble: + ctx.Errors.Error(tokens.Previous, "invalid map key type (only integral and string types are allowed)"); + break; + } + tokens.Consume(TokenType.Symbol, ","); + var valueName = tokens.Consume(TokenType.AlphaNumeric); + var valueToken = tokens.Previous; + FieldDescriptorProto.Type valueType; + if (FieldDescriptorProto.TryIdentifyType(valueName, out valueType)) + { + valueName = null; + } + tokens.Consume(TokenType.Symbol, ">"); + + var name = tokens.Consume(TokenType.AlphaNumeric); + var nameToken = tokens.Previous; + ctx.CheckNames(this, name, nameToken); + + tokens.Consume(TokenType.Symbol, "="); + int number = 
tokens.ConsumeInt32(); + + var jsonName = FieldDescriptorProto.GetJsonName(name); + var typeName = jsonName.Substring(0, 1).ToUpperInvariant() + jsonName.Substring(1) + "Entry"; + ctx.CheckNames(this, typeName, nameToken); + + var field = new FieldDescriptorProto + { + type = FieldDescriptorProto.Type.TypeMessage, + TypeName = typeName, + Name = name, + JsonName = jsonName, + Number = number, + label = FieldDescriptorProto.Label.LabelRepeated, + TypeToken = nameToken + }; + + if (tokens.ConsumeIf(TokenType.Symbol, "[")) + { + field.Options = ctx.ParseOptionBlock(field.Options, field); + } + Fields.Add(field); + + var msgType = new DescriptorProto + { + Name = typeName, + Fields = + { + new FieldDescriptorProto + { + label = FieldDescriptorProto.Label.LabelOptional, + Name = "key", + JsonName = "key", + Number = 1, + type = keyType, + TypeName = keyName, + TypeToken = keyToken, + }, + new FieldDescriptorProto + { + label = FieldDescriptorProto.Label.LabelOptional, + Name = "value", + JsonName = "value", + Number = 2, + type = valueType, + TypeName = valueName, + TypeToken = valueToken, + } + } + }; + if (msgType.Options == null) msgType.Options = new MessageOptions(); + msgType.Options.MapEntry = true; + NestedTypes.Add(msgType); + + ctx.AbortState = AbortState.None; + } + + private void ParseExtensionRange(ParserContext ctx) + { + ctx.AbortState = AbortState.Statement; + var tokens = ctx.Tokens; + tokens.Previous.RequireProto2(ctx); + + while (true) + { + int from = tokens.ConsumeInt32(MaxField), to = from; + if (tokens.Read().Is(TokenType.AlphaNumeric, "to")) + { + tokens.Consume(); + to = tokens.ConsumeInt32(MaxField); + } + // the end is off by one + if (to != int.MaxValue) to++; + ExtensionRanges.Add(new ExtensionRange { Start = from, End = to }); + + if (tokens.ConsumeIf(TokenType.Symbol, ",")) + { + tokens.Consume(); + } + else if (tokens.ConsumeIf(TokenType.Symbol, ";")) + { + break; + } + else + { + tokens.Read().Throw("unable to parse extension range"); + } + } + ctx.AbortState = AbortState.None; + } + + + + private void ParseReservedRanges(ParserContext ctx) + { + ctx.AbortState = AbortState.Statement; + var tokens = ctx.Tokens; + var token = tokens.Read(); // test the first one to determine what we're doing + switch (token.Type) + { + case TokenType.StringLiteral: + while (true) + { + var name = tokens.Consume(TokenType.StringLiteral); + var conflict = Fields.FirstOrDefault(x => x.Name == name); + if (conflict != null) + { + ctx.Errors.Error(tokens.Previous, $"'{conflict.Name}' is already in use by field {conflict.Number}"); + } + ReservedNames.Add(name); + + if (tokens.ConsumeIf(TokenType.Symbol, ",")) + { + } + else if (tokens.ConsumeIf(TokenType.Symbol, ";")) + { + break; + } + else + { + tokens.Read().Throw("unable to parse reserved range"); + } + } + break; + case TokenType.AlphaNumeric: + while (true) + { + int from = tokens.ConsumeInt32(), to = from; + if (tokens.Read().Is(TokenType.AlphaNumeric, "to")) + { + tokens.Consume(); + to = tokens.ConsumeInt32(); + } + var conflict = Fields.FirstOrDefault(x => x.Number >= from && x.Number <= to); + if (conflict != null) + { + ctx.Errors.Error(tokens.Previous, $"field {conflict.Number} is already in use by '{conflict.Name}'"); + } + ReservedRanges.Add(new ReservedRange { Start = from, End = to + 1 }); + + token = tokens.Read(); + if (token.Is(TokenType.Symbol, ",")) + { + tokens.Consume(); + } + else if (token.Is(TokenType.Symbol, ";")) + { + tokens.Consume(); + break; + } + else + { + token.Throw(); + } + } + break; + default: + 
throw token.Throw(); + } + ctx.AbortState = AbortState.None; + } + + IEnumerable IHazNames.GetNames() + { + foreach (var field in Fields) yield return field.Name; + foreach (var type in NestedTypes) yield return type.Name; + foreach (var type in EnumTypes) yield return type.Name; + foreach (var name in ReservedNames) yield return name; + } + } + + partial class OneofDescriptorProto : ISchemaObject + { + internal DescriptorProto Parent { get; set; } + internal static void Parse(ParserContext ctx, DescriptorProto parent) + { + ctx.AbortState = AbortState.Object; + var oneOf = new OneofDescriptorProto + { + Name = ctx.Tokens.Consume(TokenType.AlphaNumeric) + }; + parent.OneofDecls.Add(oneOf); + oneOf.Parent = parent; + + if (ctx.TryReadObjectImpl(oneOf)) + { + ctx.AbortState = AbortState.None; + } + } + void ISchemaObject.ReadOne(ParserContext ctx) + { + var tokens = ctx.Tokens; + if (tokens.ConsumeIf(TokenType.AlphaNumeric, "option")) + { + Options = ctx.ParseOptionStatement(Options, this); + } + else + { + FieldDescriptorProto field; + if (FieldDescriptorProto.TryParse(ctx, Parent, true, out field)) + { + field.OneofIndex = Parent.OneofDecls.Count() - 1; + Parent.Fields.Add(field); + } + } + } + } + partial class OneofOptions : ISchemaOptions + { + string ISchemaOptions.Extendee => FileDescriptorSet.Namespace + nameof(OneofOptions); + public byte[] ExtensionData + { + get { return DescriptorProto.GetExtensionData(this); } + set { DescriptorProto.SetExtensionData(this, value); } + } + bool ISchemaOptions.Deprecated { get { return false; } set { } } + bool ISchemaOptions.ReadOne(ParserContext ctx, string key) => false; + } + partial class FileDescriptorProto : ISchemaObject, IMessage, IType + { + internal static FileDescriptorProto GetFile(IType type) + { + while (type != null) + { + if (type is FileDescriptorProto) return (FileDescriptorProto)type; + type = type.Parent; + } + return null; + } + int IMessage.MaxField => FieldDescriptorProto.DefaultMaxField; + List IMessage.Fields => null; + List IMessage.Extensions => Extensions; + List IMessage.Types => MessageTypes; + + public override string ToString() => Name; + + string IType.FullyQualifiedName => null; + IType IType.Parent => null; + IType IType.Find(string name) + { + return (IType)MessageTypes.FirstOrDefault(x => string.Equals(x.Name, name, StringComparison.OrdinalIgnoreCase)) + ?? 
(IType)EnumTypes.FirstOrDefault(x => string.Equals(x.Name, name, StringComparison.OrdinalIgnoreCase)); + } + internal bool HasPendingImports { get; private set; } + internal FileDescriptorSet Parent { get; private set; } + + internal bool IncludeInOutput { get; set; } + + public bool HasImports() => _imports.Count != 0; + internal IEnumerable GetImports(bool resetPendingFlag = false) + { + if (resetPendingFlag) + { + HasPendingImports = false; + } + return _imports; + } + readonly List _imports = new List(); + internal bool AddImport(string path, bool isPublic, Token token) + { + var existing = _imports.FirstOrDefault(x => string.Equals(x.Path, path, StringComparison.OrdinalIgnoreCase)); + if (existing != null) + { + // we'll allow this to upgrade + if (isPublic) existing.IsPublic = true; + return false; + } + HasPendingImports = true; + _imports.Add(new Import { Path = path, IsPublic = isPublic, Token = token }); + return true; + } + + internal const string SyntaxProto2 = "proto2", SyntaxProto3 = "proto3"; + + void ISchemaObject.ReadOne(ParserContext ctx) + { + var tokens = ctx.Tokens; + Token token; + if (tokens.ConsumeIf(TokenType.AlphaNumeric, "message")) + { + DescriptorProto obj; + if (DescriptorProto.TryParse(ctx, this, out obj)) + MessageTypes.Add(obj); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "enum")) + { + EnumDescriptorProto obj; + if (EnumDescriptorProto.TryParse(ctx, this, out obj)) + EnumTypes.Add(obj); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "extend")) + { + FieldDescriptorProto.ParseExtensions(ctx, this); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "service")) + { + ServiceDescriptorProto obj; + if (ServiceDescriptorProto.TryParse(ctx, out obj)) + Services.Add(obj); + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "import")) + { + ctx.AbortState = AbortState.Statement; + bool isPublic = tokens.ConsumeIf(TokenType.AlphaNumeric, "public"); + string path = tokens.Consume(TokenType.StringLiteral); + + if (!AddImport(path, isPublic, tokens.Previous)) + { + ctx.Errors.Warn(tokens.Previous, $"duplicate import: '{path}'"); + } + tokens.Consume(TokenType.Symbol, ";"); + ctx.AbortState = AbortState.None; + + + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "syntax")) + { + ctx.AbortState = AbortState.Statement; + if (MessageTypes.Any() || EnumTypes.Any() || Extensions.Any()) + { + ctx.Errors.Error(tokens.Previous, "syntax must be set before types are defined"); + } + tokens.Consume(TokenType.Symbol, "="); + Syntax = tokens.Consume(TokenType.StringLiteral); + switch (Syntax) + { + case SyntaxProto2: + case SyntaxProto3: + break; + default: + ctx.Errors.Error(tokens.Previous, $"unknown syntax '{Syntax}'"); + break; + } + tokens.Consume(TokenType.Symbol, ";"); + ctx.AbortState = AbortState.None; + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "package")) + { + ctx.AbortState = AbortState.Statement; + Package = tokens.Consume(TokenType.AlphaNumeric); + ctx.AbortState = AbortState.None; + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "option")) + { + Options = ctx.ParseOptionStatement(Options, this); + } + else if (tokens.Peek(out token)) + { + token.Throw(); + } // else EOF + } + + public void Parse(TextReader schema, List errors, string file) + { + Syntax = ""; + using (var ctx = new ParserContext(this, new Peekable(schema.Tokenize(file).RemoveCommentsAndWhitespace()), errors)) + { + var tokens = ctx.Tokens; + Token startOfFile; + tokens.Peek(out startOfFile); // want this for "stuff you didn't do" warnings + + // 
read the file into the object + ctx.Fill(this); + + // finish up + if (string.IsNullOrWhiteSpace(Syntax)) + { + ctx.Errors.Warn(startOfFile, "no syntax specified; it is strongly recommended to specify 'syntax=\"proto2\";' or 'syntax=\"proto3\";'"); + } + if (Syntax == "" || Syntax == SyntaxProto2) + { + Syntax = null; // for output compatibility; is blank even if set to proto2 explicitly + } + } + } + + + internal bool TryResolveEnum(string typeName, IType parent, out EnumDescriptorProto @enum, bool allowImports, bool treatAllAsPublic = false) + { + IType type; + if (TryResolveType(typeName, parent, out type, allowImports, true, treatAllAsPublic)) + { + @enum = type as EnumDescriptorProto; + return @enum != null; + } + @enum = null; + return false; + } + internal bool TryResolveMessage(string typeName, IType parent, out DescriptorProto message, bool allowImports, bool treatAllAsPublic = false) + { + IType type; + if (TryResolveType(typeName, parent, out type, allowImports, true, treatAllAsPublic)) + { + message = type as DescriptorProto; + return message != null; + } + message = null; + return false; + } + internal static bool TrySplit(string input, out string left, out string right) + { + var split = input.IndexOf('.'); + if (split < 0) + { + left = right = null; + return false; + } + left = input.Substring(0, split).Trim(); + right = input.Substring(split + 1).Trim(); + return true; + } + internal static bool TrySplitLast(string input, out string left, out string right) + { + var split = input.LastIndexOf('.'); + if (split < 0) + { + left = right = null; + return false; + } + left = input.Substring(0, split).Trim(); + right = input.Substring(split + 1).Trim(); + return true; + } + bool TryResolveFromFile(FileDescriptorProto file, string ee, string ion, out FieldDescriptorProto fld, bool withPackageName, bool ai) + { + fld = null; + if (file == null) return false; + + if (withPackageName) + { + var pkg = file.Package; + if (string.IsNullOrWhiteSpace(pkg)) return false; // we're only looking *with* packages right now + + if (!ion.StartsWith(pkg + ".")) return false; // wrong file + + ion = ion.Substring(pkg.Length + 1); // and fully qualified (non-qualified is a second pass) + } + + return file.TryResolveExtension(ee, ion, out fld, ai, false); + } + private bool TryResolveExtension(string extendee, string extension, out FieldDescriptorProto field, bool allowImports = true, bool checkOwnPackage = true) + { + bool isRooted = extension.StartsWith("."); + if (isRooted) + { + // rooted + extension = extension.Substring(1); // remove the root + } + string left; + string right; + if (TrySplitLast(extension, out left, out right)) + { + IType type; + if (TryResolveType(left, null, out type, true, true)) + { + field = (type as DescriptorProto)?.Extensions?.FirstOrDefault(x => x.Extendee == extendee + && x.Name == right); + if (field != null) return true; + } + } + else + { + field = Extensions?.FirstOrDefault(x => x.Extendee == extendee && x.Name == extension); + if (field != null) return true; + } + + if (checkOwnPackage) + { + if (TryResolveFromFile(this, extendee, extension, out field, true, false)) return true; + if (TryResolveFromFile(this, extendee, extension, out field, false, false)) return true; + } + if (allowImports) + { + foreach (var import in _imports) + { + var file = Parent?.GetFile(import.Path); + if (file != null) + { + if (TryResolveFromFile(file, extendee, extension, out field, true, import.IsPublic)) + { + import.Used = true; + return true; + } + } + } + foreach (var import in 
_imports) + { + var file = Parent?.GetFile(import.Path); + if (file != null) + { + if (TryResolveFromFile(file, extendee, extension, out field, false, import.IsPublic)) + { + import.Used = true; + return true; + } + } + } + } + field = null; + return false; + } + bool TryResolveFromFile(FileDescriptorProto file, string tn, bool ai, out IType tp, bool withPackageName, bool treatAllAsPublic) + { + tp = null; + if (file == null) return false; + + if (withPackageName) + { + var pkg = file.Package; + if (string.IsNullOrWhiteSpace(pkg)) return false; // we're only looking *with* packages right now + + if (!tn.StartsWith(pkg + ".")) return false; // wrong file + + tn = tn.Substring(pkg.Length + 1); // and fully qualified (non-qualified is a second pass) + } + + return file.TryResolveType(tn, file, out tp, ai, false, treatAllAsPublic); + } + internal bool TryResolveType(string typeName, IType parent, out IType type, bool allowImports, bool checkOwnPackage = true, bool treatAllAsPublic = false) + { + bool isRooted = typeName.StartsWith("."); + string left; + string right; + if (isRooted) + { + // rooted + typeName = typeName.Substring(1); // remove the root + } + else if (TrySplit(typeName, out left, out right)) + { + while (parent != null) + { + var next = parent?.Find(left); + if (next != null && TryResolveType(right, next, out type, false, treatAllAsPublic)) return true; + + parent = parent.Parent; + } + } + else + { + // simple name + while (parent != null) + { + type = parent.Find(typeName); + if (type != null) + { + return true; + } + parent = parent.Parent; + } + } + + if (checkOwnPackage && TryResolveFromFile(this, typeName, false, out type, true, treatAllAsPublic)) return true; + if (checkOwnPackage && TryResolveFromFile(this, typeName, false, out type, false, treatAllAsPublic)) return true; + + // look at imports + // check for the name including the package prefix + foreach (var import in _imports) + { + if (allowImports || import.IsPublic || treatAllAsPublic) + { + var file = Parent?.GetFile(import.Path); + if (TryResolveFromFile(file, typeName, false, out type, true, treatAllAsPublic)) + { + import.Used = true; + return true; + } + } + } + + // now look without package prefix + foreach (var import in _imports) + { + if (allowImports || import.IsPublic || treatAllAsPublic) + { + var file = Parent?.GetFile(import.Path); + if (TryResolveFromFile(file, typeName, false, out type, false, treatAllAsPublic)) + { + import.Used = true; + return true; + } + } + } + + type = null; + return false; + + } + + static void SetParents(string prefix, EnumDescriptorProto parent) + { + parent.FullyQualifiedName = prefix + "." + parent.Name; + foreach (var val in parent.Values) + { + val.Parent = parent; + } + } + static void SetParents(string prefix, DescriptorProto parent) + { + var fqn = parent.FullyQualifiedName = prefix + "." + parent.Name; + foreach (var field in parent.Fields) + { + field.Parent = parent; + } + foreach (var @enum in parent.EnumTypes) + { + @enum.Parent = parent; + SetParents(fqn, @enum); + } + foreach (var child in parent.NestedTypes) + { + child.Parent = parent; + SetParents(fqn, child); + } + foreach (var ext in parent.Extensions) + { + ext.Parent = parent; + } + } + internal void BuildTypeHierarchy(FileDescriptorSet set, ParserContext ctx) + { + // build the tree starting at the root + Parent = set; + var prefix = string.IsNullOrWhiteSpace(Package) ? "" : ("." 
+ Package); + foreach (var type in EnumTypes) + { + type.Parent = this; + SetParents(prefix, type); + } + foreach (var type in MessageTypes) + { + type.Parent = this; + SetParents(prefix, type); + } + foreach (var type in Extensions) + { + type.Parent = this; + } + } + + static bool ShouldResolveType(FieldDescriptorProto.Type type) + { + switch (type) + { + case 0: + case FieldDescriptorProto.Type.TypeMessage: + case FieldDescriptorProto.Type.TypeEnum: + case FieldDescriptorProto.Type.TypeGroup: + return true; + default: + return false; + } + } + private void ResolveTypes(ParserContext ctx, List fields, IType parent, bool options) + { + foreach (var field in fields) + { + if (options) ResolveOptions(ctx, field.Options); + else + { + if (!string.IsNullOrEmpty(field.TypeName) && ShouldResolveType(field.type)) + { + // TODO: use TryResolveType once rather than twice + string fqn; + DescriptorProto msg; + EnumDescriptorProto @enum; + if (TryResolveMessage(field.TypeName, parent, out msg, true)) + { + if (field.type != FieldDescriptorProto.Type.TypeGroup) + { + field.type = FieldDescriptorProto.Type.TypeMessage; + } + fqn = msg?.FullyQualifiedName; + } + else if (TryResolveEnum(field.TypeName, parent, out @enum, true)) + { + field.type = FieldDescriptorProto.Type.TypeEnum; + if (!string.IsNullOrWhiteSpace(field.DefaultValue) + & !@enum.Values.Any(x => x.Name == field.DefaultValue)) + { + ctx.Errors.Error(field.TypeToken, $"enum {@enum.Name} does not contain value '{field.DefaultValue}'"); + } + fqn = @enum?.FullyQualifiedName; + } + else + { + ctx.Errors.Add(field.TypeToken.TypeNotFound(field.TypeName)); + fqn = field.TypeName; + field.type = FieldDescriptorProto.Type.TypeMessage; // just an assumption + } + field.TypeName = fqn; + } + + if (!string.IsNullOrEmpty(field.Extendee)) + { + string fqn; + DescriptorProto msg; + if (TryResolveMessage(field.Extendee, parent, out msg, true)) + { + fqn = msg?.FullyQualifiedName; + } + else + { + ctx.Errors.Add(field.TypeToken.TypeNotFound(field.Extendee)); + fqn = field.Extendee; + } + field.Extendee = fqn; + } + + if (field.Options?.Packed ?? 
false) + { + bool canPack = FieldDescriptorProto.CanPack(field.type); + if (!canPack) + { + ctx.Errors.Error(field.TypeToken, $"field of type {field.type} cannot be packed"); + field.Options.Packed = false; + } + } + } + } + } + + private void ResolveTypes(ParserContext ctx, ServiceDescriptorProto service, bool options) + { + if (options) ResolveOptions(ctx, service.Options); + foreach (var method in service.Methods) + { + if (options) ResolveOptions(ctx, method.Options); + else + { + DescriptorProto msg; + if (!TryResolveMessage(method.InputType, this, out msg, true)) + { + ctx.Errors.Add(method.InputTypeToken.TypeNotFound(method.InputType)); + } + method.InputType = msg?.FullyQualifiedName; + if (!TryResolveMessage(method.OutputType, this, out msg, true)) + { + ctx.Errors.Add(method.OutputTypeToken.TypeNotFound(method.OutputType)); + } + method.OutputType = msg?.FullyQualifiedName; + } + } + } + + private void ResolveTypes(ParserContext ctx, DescriptorProto type, bool options) + { + if (options) + { + ResolveOptions(ctx, type.Options); + foreach (var decl in type.OneofDecls) + ResolveOptions(ctx, decl.Options); + } + + ResolveTypes(ctx, type.Fields, type, options); + ResolveTypes(ctx, type.Extensions, type, options); + foreach (var nested in type.NestedTypes) + { + ResolveTypes(ctx, nested, options); + } + foreach (var nested in type.EnumTypes) + { + ResolveTypes(ctx, nested, options); + } + } + + + IEnumerable IHazNames.GetNames() + { + foreach (var type in MessageTypes) yield return type.Name; + foreach (var type in EnumTypes) yield return type.Name; + } + internal void ResolveTypes(ParserContext ctx, bool options) + { + if (options) ResolveOptions(ctx, Options); + foreach (var type in MessageTypes) + { + ResolveTypes(ctx, type, options); + } + foreach (var type in EnumTypes) + { + ResolveTypes(ctx, type, options); + } + foreach (var service in Services) + { + ResolveTypes(ctx, service, options); + } + ResolveTypes(ctx, Extensions, this, options); + + if (options) // can only process deps on the second pass, once options have been resolved + { + HashSet publicDependencies = null; + foreach (var import in _imports) + { + if (!Dependencies.Contains(import.Path)) + Dependencies.Add(import.Path); + if (import.IsPublic) + { + (publicDependencies ?? 
(publicDependencies = new HashSet())).Add(import.Path); + } + if (IncludeInOutput && !import.Used) + { + ctx.Errors.Warn(import.Token, $"import not used: '{import.Path}'"); + } + } + // note that Dependencies should stay in declaration order to be consistent with protoc + if (publicDependencies != null) + { + var arr = publicDependencies.Select(path => Dependencies.IndexOf(path)).ToArray(); + Array.Sort(arr); + PublicDependencies = arr; + } + } + } + + private void ResolveTypes(ParserContext ctx, EnumDescriptorProto type, bool options) + { + if (options) + { + ResolveOptions(ctx, type.Options); + foreach (var val in type.Values) + { + ResolveOptions(ctx, val.Options); + } + } + } + + private void ResolveOptions(ParserContext ctx, ISchemaOptions options) + { + if (options == null || options.UninterpretedOptions.Count == 0) return; + + var extension = ((IExtensible)options).GetExtensionObject(true); + var target = extension.BeginAppend(); + try + { + using (var writer = new ProtoWriter(target, null, null)) + { + var hive = OptionHive.Build(options.UninterpretedOptions); + + // first pass is used to sort the fields so we write them in the right order + AppendOptions(this, writer, ctx, options.Extendee, hive.Children, true, 0, false); + // second pass applies the data + AppendOptions(this, writer, ctx, options.Extendee, hive.Children, false, 0, false); + } + options.UninterpretedOptions.RemoveAll(x => x.Applied); + } + finally + { + extension.EndAppend(target, true); + } + + } + + class OptionHive + { + public OptionHive(string name, bool isExtension, Token token) + { + Name = name; + IsExtension = isExtension; + Token = token; + } + public override string ToString() + { + var sb = new StringBuilder(); + Concat(sb); + return sb.ToString(); + } + private void Concat(StringBuilder sb) + { + bool isFirst = true; + foreach (var value in Options) + { + if (!isFirst) sb.Append(", "); + isFirst = false; + sb.Append(value); + } + foreach (var child in Children) + { + if (!isFirst) sb.Append(", "); + sb.Append(child.Name).Append("={"); + child.Concat(sb); + sb.Append("}"); + } + } + public bool IsExtension { get; } + public string Name { get; } + public Token Token { get; } + public List Options { get; } = new List(); + public List Children { get; } = new List(); + public FieldDescriptorProto Field { get; set; } + + public static OptionHive Build(List options) + { + if (options == null || options.Count == 0) return null; + + var root = new OptionHive(null, false, default(Token)); + foreach (var option in options) + { + var level = root; + OptionHive nextLevel = null; + foreach (var name in option.Names) + { + nextLevel = level.Children.FirstOrDefault(x => x.Name == name.name_part && x.IsExtension == name.IsExtension); + if (nextLevel == null) + { + nextLevel = new OptionHive(name.name_part, name.IsExtension, name.Token); + level.Children.Add(nextLevel); + } + level = nextLevel; + } + level.Options.Add(option); + } + return root; + } + } + private static void AppendOptions(FileDescriptorProto file, ProtoWriter writer, ParserContext ctx, string extendee, List options, bool resolveOnly, int depth, bool messageSet) + { + foreach (var option in options) + AppendOption(file, writer, ctx, extendee, option, resolveOnly, depth, messageSet); + + if (resolveOnly && depth != 0) // fun fact: proto writes root fields in *file* order, but sub-fields in *field* order + { + // ascending field order + options.Sort((x, y) => (x.Field?.Number ?? 0).CompareTo(y.Field?.Number ?? 
0)); + } + } + private static bool ShouldWrite(FieldDescriptorProto f, string v, string d){ + return f.label != FieldDescriptorProto.Label.LabelOptional || v != (f.DefaultValue ?? d); + } + private static void AppendOption(FileDescriptorProto file, ProtoWriter writer, ParserContext ctx, string extendee, OptionHive option, bool resolveOnly, int depth, bool messageSet) + { + // resolve the field for this level + FieldDescriptorProto field = option.Field; + DescriptorProto msg; + if (field != null) + { + // already resolved + } + else if (option.IsExtension) + { + if (!file.TryResolveExtension(extendee, option.Name, out field)) field = null; + } + else if (file.TryResolveMessage(extendee, null, out msg, true)) + { + field = msg.Fields.FirstOrDefault(x => x.Name == option.Name); + } + else + { + field = null; + } + + if (field == null) + { + if (!resolveOnly) + { + ctx.Errors.Error(option.Token, $"unable to resolve custom option '{option.Name}' for '{extendee}'"); + } + return; + } + option.Field = field; + + switch (field.type) + { + case FieldDescriptorProto.Type.TypeMessage: + case FieldDescriptorProto.Type.TypeGroup: + var nextFile = GetFile(field.Parent as IType); + DescriptorProto fieldType; + var nextMessageSet = !resolveOnly && nextFile.TryResolveMessage(field.TypeName, null, out fieldType, true) + && (fieldType.Options?.MessageSetWireFormat ?? false); + + if (option.Children.Count != 0) + { + if (resolveOnly) + { + AppendOptions(nextFile, writer, ctx, field.TypeName, option.Children, resolveOnly, depth + 1, nextMessageSet); + } + else if (messageSet) + { + ProtoWriter.WriteFieldHeader(1, WireType.StartGroup, writer); + var grp = ProtoWriter.StartSubItem(null, writer); + + ProtoWriter.WriteFieldHeader(2, WireType.Variant, writer); + ProtoWriter.WriteInt32(field.Number, writer); + + ProtoWriter.WriteFieldHeader(3, WireType.String, writer); + var payload = ProtoWriter.StartSubItem(null, writer); + + AppendOptions(nextFile, writer, ctx, field.TypeName, option.Children, resolveOnly, depth + 1, nextMessageSet); + + ProtoWriter.EndSubItem(payload, writer); + ProtoWriter.EndSubItem(grp, writer); + } + else + { + ProtoWriter.WriteFieldHeader(field.Number, + field.type == FieldDescriptorProto.Type.TypeGroup ? WireType.StartGroup : WireType.String, writer); + var tok = ProtoWriter.StartSubItem(null, writer); + + AppendOptions(nextFile, writer, ctx, field.TypeName, option.Children, resolveOnly, depth + 1, nextMessageSet); + + ProtoWriter.EndSubItem(tok, writer); + } + } + if (resolveOnly) return; // nothing more to do + + if (option.Options.Count == 1 && !option.Options.Single().ShouldSerializeAggregateValue()) + { + // need to write an empty object to match protoc + if (messageSet) + { + ProtoWriter.WriteFieldHeader(1, WireType.StartGroup, writer); + var grp = ProtoWriter.StartSubItem(null, writer); + + ProtoWriter.WriteFieldHeader(2, WireType.Variant, writer); + ProtoWriter.WriteInt32(field.Number, writer); + + ProtoWriter.WriteFieldHeader(3, WireType.String, writer); + var payload = ProtoWriter.StartSubItem(null, writer); + ProtoWriter.EndSubItem(payload, writer); + ProtoWriter.EndSubItem(grp, writer); + } + else + { + ProtoWriter.WriteFieldHeader(field.Number, + field.type == FieldDescriptorProto.Type.TypeGroup ? 
WireType.StartGroup : WireType.String, writer); + var payload = ProtoWriter.StartSubItem(null, writer); + ProtoWriter.EndSubItem(payload, writer); + } + option.Options.Single().Applied = true; + } + else + { + foreach (var values in option.Options) + { + ctx.Errors.Error(option.Token, $"unable to assign custom option '{option.Name}' for '{extendee}'"); + } + } + break; + default: + if (resolveOnly) return; // nothing more to do + + foreach (var child in option.Children) + { + ctx.Errors.Error(option.Token, $"unable to assign custom option '{child.Name}' for '{extendee}'"); + } + foreach (var value in option.Options) + { + int i32; + switch (field.type) + { + case FieldDescriptorProto.Type.TypeFloat: + float f32; + if (!TokenExtensions.TryParseSingle(value.AggregateValue, out f32)) + { + ctx.Errors.Error(option.Token, $"invalid value for floating point '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + if (ShouldWrite(field, value.AggregateValue, "0")) + { + ProtoWriter.WriteFieldHeader(field.Number, WireType.Fixed32, writer); + ProtoWriter.WriteSingle(f32, writer); + } + break; + case FieldDescriptorProto.Type.TypeDouble: + double f64; + if (!TokenExtensions.TryParseDouble(value.AggregateValue, out f64)) + { + ctx.Errors.Error(option.Token, $"invalid value for floating point '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + if (ShouldWrite(field, value.AggregateValue, "0")) + { + ProtoWriter.WriteFieldHeader(field.Number, WireType.Fixed64, writer); + ProtoWriter.WriteDouble(f64, writer); + } + break; + case FieldDescriptorProto.Type.TypeBool: + switch (value.AggregateValue) + { + case "true": + i32 = 1; + break; + case "false": + i32 = 0; + break; + default: + ctx.Errors.Error(option.Token, $"invalid value for boolean '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + if (ShouldWrite(field, value.AggregateValue, "false")) + { + ProtoWriter.WriteFieldHeader(field.Number, WireType.Variant, writer); + ProtoWriter.WriteInt32(i32, writer); + } + break; + case FieldDescriptorProto.Type.TypeUint32: + case FieldDescriptorProto.Type.TypeFixed32: + { + uint ui32; + if (!TokenExtensions.TryParseUInt32(value.AggregateValue, out ui32)) + { + ctx.Errors.Error(option.Token, $"invalid value for unsigned integer '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + if (ShouldWrite(field, value.AggregateValue, "0")) + { + switch (field.type) + { + case FieldDescriptorProto.Type.TypeUint32: + ProtoWriter.WriteFieldHeader(field.Number, WireType.Variant, writer); + break; + case FieldDescriptorProto.Type.TypeFixed32: + ProtoWriter.WriteFieldHeader(field.Number, WireType.Fixed32, writer); + break; + } + ProtoWriter.WriteUInt32(ui32, writer); + } + } + break; + case FieldDescriptorProto.Type.TypeUint64: + case FieldDescriptorProto.Type.TypeFixed64: + { + ulong ui64; + if (!TokenExtensions.TryParseUInt64(value.AggregateValue, out ui64)) + { + ctx.Errors.Error(option.Token, $"invalid value for unsigned integer '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + if (ShouldWrite(field, value.AggregateValue, "0")) + { + switch (field.type) + { + case FieldDescriptorProto.Type.TypeUint64: + ProtoWriter.WriteFieldHeader(field.Number, WireType.Variant, writer); + break; + case FieldDescriptorProto.Type.TypeFixed64: + ProtoWriter.WriteFieldHeader(field.Number, WireType.Fixed64, writer); + break; + } + ProtoWriter.WriteUInt64(ui64, writer); + } + } + break; + 
case FieldDescriptorProto.Type.TypeInt32: + case FieldDescriptorProto.Type.TypeSint32: + case FieldDescriptorProto.Type.TypeSfixed32: + if (!TokenExtensions.TryParseInt32(value.AggregateValue, out i32)) + { + ctx.Errors.Error(option.Token, $"invalid value for integer '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + if (ShouldWrite(field, value.AggregateValue, "0")) + { + switch (field.type) + { + case FieldDescriptorProto.Type.TypeInt32: + ProtoWriter.WriteFieldHeader(field.Number, WireType.Variant, writer); + break; + case FieldDescriptorProto.Type.TypeSint32: + ProtoWriter.WriteFieldHeader(field.Number, WireType.SignedVariant, writer); + break; + case FieldDescriptorProto.Type.TypeSfixed32: + ProtoWriter.WriteFieldHeader(field.Number, WireType.Fixed32, writer); + break; + } + ProtoWriter.WriteInt32(i32, writer); + } + break; + case FieldDescriptorProto.Type.TypeInt64: + case FieldDescriptorProto.Type.TypeSint64: + case FieldDescriptorProto.Type.TypeSfixed64: + { + long i64; + if (!TokenExtensions.TryParseInt64(value.AggregateValue, out i64)) + { + ctx.Errors.Error(option.Token, $"invalid value for integer '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + if (ShouldWrite(field, value.AggregateValue, "0")) + { + switch (field.type) + { + case FieldDescriptorProto.Type.TypeInt64: + ProtoWriter.WriteFieldHeader(field.Number, WireType.Variant, writer); + break; + case FieldDescriptorProto.Type.TypeSint64: + ProtoWriter.WriteFieldHeader(field.Number, WireType.SignedVariant, writer); + break; + case FieldDescriptorProto.Type.TypeSfixed64: + ProtoWriter.WriteFieldHeader(field.Number, WireType.Fixed64, writer); + break; + } + ProtoWriter.WriteInt64(i64, writer); + } + } + break; + case FieldDescriptorProto.Type.TypeEnum: + EnumDescriptorProto @enum; + if (file.TryResolveEnum(field.TypeName, null, out @enum, true, true)) + { + var found = @enum.Values.FirstOrDefault(x => x.Name == value.AggregateValue); + if (found == null) + { + ctx.Errors.Error(option.Token, $"invalid value for enum '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + else + { + if (ShouldWrite(field, value.AggregateValue, @enum.Values.FirstOrDefault()?.Name)) + { + ProtoWriter.WriteFieldHeader(field.Number, WireType.Variant, writer); + ProtoWriter.WriteInt32(found.Number, writer); + } + } + } + else + { + ctx.Errors.Error(option.Token, $"unable to resolve enum '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + break; + case FieldDescriptorProto.Type.TypeString: + case FieldDescriptorProto.Type.TypeBytes: + if (ShouldWrite(field, value.AggregateValue, "")) + { + ProtoWriter.WriteFieldHeader(field.Number, WireType.String, writer); + if (value.AggregateValue == null || value.AggregateValue.IndexOf('\\') < 0) + ProtoWriter.WriteString(value.AggregateValue ?? 
"", writer); + else + { + using (var ms = new MemoryStream(value.AggregateValue.Length)) + { + if (!LoadBytes(ms, value.AggregateValue)) + { + ctx.Errors.Error(option.Token, $"invalid escape sequence '{field.TypeName}': '{option.Name}' = '{value.AggregateValue}'"); + continue; + } +#if NETSTANDARD1_3 + if (ms.TryGetBuffer(out var seg)) + ProtoWriter.WriteBytes(seg.Array, seg.Offset, seg.Count, writer); + else + ProtoWriter.WriteBytes(ms.ToArray(), writer); +#else + ProtoWriter.WriteBytes(ms.GetBuffer(), 0, (int)ms.Length, writer); +#endif + } + } + } + break; + default: + ctx.Errors.Error(option.Token, $"{field.type} options not yet implemented: '{option.Name}' = '{value.AggregateValue}'"); + continue; + } + value.Applied = true; + } + break; + } + } + + private static unsafe bool LoadBytes(Stream ms, string value) + { + bool isEscaped = false; + byte* b = stackalloc byte[10]; + foreach (char c in value) + { + if (isEscaped) + { + isEscaped = false; + // only a few things remain escaped after ConsumeString: + switch (c) + { + case '\\': ms.WriteByte((byte)'\\'); break; + case '\'': ms.WriteByte((byte)'\''); break; + case '"': ms.WriteByte((byte)'"'); break; + case 'r': ms.WriteByte((byte)'\r'); break; + case 'n': ms.WriteByte((byte)'\n'); break; + case 't': ms.WriteByte((byte)'\t'); break; + default: return false; + } + } + else if (c == '\\') + { + isEscaped = true; + } + else + { + var x = c; // can't take address of readonly local + int bytes = Encoding.UTF8.GetBytes(&x, 1, b, 10); + for (int i = 0; i < bytes; i++) + { + ms.WriteByte(b[i]); + } + } + } + return !isEscaped; + } + } + + partial class EnumDescriptorProto : ISchemaObject, IType + { + public override string ToString() => Name; + internal IType Parent { get; set; } + string IType.FullyQualifiedName => FullyQualifiedName; + IType IType.Parent => Parent; + IType IType.Find(string name) => null; + internal string FullyQualifiedName { get; set; } + + internal static bool TryParse(ParserContext ctx, IHazNames parent, out EnumDescriptorProto obj) + { + var name = ctx.Tokens.Consume(TokenType.AlphaNumeric); + ctx.CheckNames(parent, name, ctx.Tokens.Previous); + if (ctx.TryReadObject(out obj)) + { + obj.Name = name; + return true; + } + return false; + } + + void ISchemaObject.ReadOne(ParserContext ctx) + { + ctx.AbortState = AbortState.Statement; + var tokens = ctx.Tokens; + if (tokens.ConsumeIf(TokenType.AlphaNumeric, "option")) + { + Options = ctx.ParseOptionStatement(Options, this); + } + else + { + Values.Add(EnumValueDescriptorProto.Parse(ctx)); + } + ctx.AbortState = AbortState.None; + } + + } + partial class FieldDescriptorProto : ISchemaObject + { + + public bool IsPacked(string syntax) + { + if (label != Label.LabelRepeated) return false; + + var exp = Options?.Packed; + if (exp.HasValue) return exp.GetValueOrDefault(); + + if (syntax != FileDescriptorProto.SyntaxProto2 && FieldDescriptorProto.CanPack(type)) + { + return true; + } + + return false; + } + public override string ToString() => Name; + internal const int DefaultMaxField = 536870911; + internal const int FirstReservedField = 19000; + internal const int LastReservedField = 19999; + + internal IMessage Parent { get; set; } + internal Token TypeToken { get; set; } + + internal int MaxField => Parent?.MaxField ?? 
DefaultMaxField; + + internal static void NotAllowedOneOf(ParserContext ctx) + { + var token = ctx.Tokens.Previous; + ctx.Errors.Error(token, $"'{token.Value}' not allowed with 'oneof'"); + } + + internal static bool TryParse(ParserContext ctx, IMessage parent, bool isOneOf, out FieldDescriptorProto field) + { + var tokens = ctx.Tokens; + ctx.AbortState = AbortState.Statement; + Label label = Label.LabelOptional; // default + + if (tokens.ConsumeIf(TokenType.AlphaNumeric, "repeated")) + { + if (isOneOf) NotAllowedOneOf(ctx); + label = Label.LabelRepeated; + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "required")) + { + if (isOneOf) NotAllowedOneOf(ctx); + else tokens.Previous.RequireProto2(ctx); + label = Label.LabelRequired; + } + else if (tokens.ConsumeIf(TokenType.AlphaNumeric, "optional")) + { + if (isOneOf) NotAllowedOneOf(ctx); + else tokens.Previous.RequireProto2(ctx); + label = Label.LabelOptional; + } + else if (ctx.Syntax == FileDescriptorProto.SyntaxProto2 && !isOneOf) + { + // required in proto2 + throw tokens.Read().Throw("expected 'repeated' / 'required' / 'optional'"); + } + + var typeToken = tokens.Read(); + if (typeToken.Is(TokenType.AlphaNumeric, "map")) + { + tokens.Previous.Throw($"'{tokens.Previous.Value}' can not be used with 'map'"); + } + string typeName = tokens.Consume(TokenType.AlphaNumeric); + + var isGroup = typeName == "group"; + if (isGroup) + { + //if (isOneOf) NotAllowedOneOf(ctx); + //else if (parentTyped == null) + //{ + // ctx.Errors.Error(tokens.Previous, "group not allowed in this context"); + //} + ctx.AbortState = AbortState.Object; + } + + string name = tokens.Consume(TokenType.AlphaNumeric); + var nameToken = tokens.Previous; + tokens.Consume(TokenType.Symbol, "="); + var number = tokens.ConsumeInt32(); + var numberToken = tokens.Previous; + + if (number < 1 || number > parent.MaxField) + { + ctx.Errors.Error(numberToken, $"field numbers must be in the range 1-{parent.MaxField}"); + } + else if (number >= FirstReservedField && number <= LastReservedField) + { + ctx.Errors.Warn(numberToken, $"field numbers in the range {FirstReservedField}-{LastReservedField} are reserved; this may cause problems on many implementations"); + } + ctx.CheckNames(parent, name, nameToken); + if (parent is DescriptorProto) + { + var parentTyped = parent as DescriptorProto; + var conflict = parentTyped.Fields.FirstOrDefault(x => x.Number == number); + if (conflict != null) + { + ctx.Errors.Error(numberToken, $"field {number} is already in use by '{conflict.Name}'"); + } + if (parentTyped.ReservedNames.Contains(name)) + { + ctx.Errors.Error(nameToken, $"field '{name}' is reserved"); + } + if (parentTyped.ReservedRanges.Any(x => x.Start <= number && x.End > number)) + { + ctx.Errors.Error(numberToken, $"field {number} is reserved"); + } + } + + Type type; + if (isGroup) + { + type = Type.TypeGroup; + typeName = name; + + typeToken.RequireProto2(ctx); + + var firstChar = typeName[0].ToString(); + if (firstChar.ToLowerInvariant() == firstChar) + { + ctx.Errors.Error(nameToken, "group names must start with an upper-case letter"); + } + name = typeName.ToLowerInvariant(); + DescriptorProto grpType; + if (ctx.TryReadObject(out grpType)) + { + grpType.Name = typeName; + ctx.CheckNames(parent, typeName, nameToken); + parent?.Types?.Add(grpType); + } + } + else if (TryIdentifyType(typeName, out type)) + { + typeName = null; + } + + field = new FieldDescriptorProto + { + type = type, + TypeName = typeName, + Name = name, + JsonName = GetJsonName(name), + Number = number, + 
label = label, + TypeToken = typeToken // internal property that helps give useful error messages + }; + + if (!isGroup) + { + if (tokens.ConsumeIf(TokenType.Symbol, "[")) + { + field.Options = ctx.ParseOptionBlock(field.Options, field); + } + + tokens.Consume(TokenType.Symbol, ";"); + } + ctx.AbortState = AbortState.None; + return true; + } + static readonly char[] Underscores = { '_' }; + internal static string GetJsonName(string name) + => Regex.Replace(name, "_+([0-9a-zA-Z])", match => match.Groups[1].Value.ToUpperInvariant()).TrimEnd(Underscores); + + + internal static bool CanPack(Type type) + { + switch (type) + { + case Type.TypeBool: + case Type.TypeDouble: + case Type.TypeEnum: + case Type.TypeFixed32: + case Type.TypeFixed64: + case Type.TypeFloat: + case Type.TypeInt32: + case Type.TypeInt64: + case Type.TypeSfixed32: + case Type.TypeSfixed64: + case Type.TypeSint32: + case Type.TypeSint64: + case Type.TypeUint32: + case Type.TypeUint64: + return true; + default: + return false; + } + } + internal static bool Assign(Type @in, out Type @out) + { + @out = @in; + return true; + } + internal static bool TryIdentifyType(string typeName, out Type type) + { + switch (typeName) + { + case "bool": return Assign(Type.TypeBool, out @type); + case "bytes": return Assign(Type.TypeBytes, out @type); + case "double": return Assign(Type.TypeDouble, out @type); + case "fixed32": return Assign(Type.TypeFixed32, out @type); + case "fixed64": return Assign(Type.TypeFixed64, out @type); + case "float": return Assign(Type.TypeFloat, out @type); + case "int32": return Assign(Type.TypeInt32, out @type); + case "int64": return Assign(Type.TypeInt64, out @type); + case "sfixed32": return Assign(Type.TypeSfixed32, out @type); + case "sfixed64": return Assign(Type.TypeSfixed64, out @type); + case "sint32": return Assign(Type.TypeSint32, out @type); + case "sint64": return Assign(Type.TypeSint64, out @type); + case "string": return Assign(Type.TypeString, out @type); + case "uint32": return Assign(Type.TypeUint32, out @type); + case "uint64": return Assign(Type.TypeUint64, out @type); + default: + type = default(Type); + return false; + } + } + + internal static void ParseExtensions(ParserContext ctx, IMessage message) + { + var extendee = ctx.Tokens.Consume(TokenType.AlphaNumeric); + var dummy = new DummyExtensions(extendee, message); + ctx.TryReadObjectImpl(dummy); + } + + void ISchemaObject.ReadOne(ParserContext ctx) + { + throw new InvalidOperationException(); + } + + class DummyExtensions : ISchemaObject, IHazNames, IMessage + { + int IMessage.MaxField => message.MaxField; + List IMessage.Types => message.Types; + List IMessage.Extensions => message.Extensions; + List IMessage.Fields => message.Fields; + public byte[] ExtensionData + { + get { return null; } + set { } + } + IEnumerable IHazNames.GetNames() + { + var fields = message.Fields; + if (fields != null) + { + foreach (var field in fields) yield return field.Name; + } + foreach (var field in message.Extensions) yield return field.Name; + foreach (var type in message.Types) yield return type.Name; + } + + void ISchemaObject.ReadOne(ParserContext ctx) + { + ctx.AbortState = AbortState.Statement; + FieldDescriptorProto field; + if (TryParse(ctx, this, false, out field)) + { + field.Extendee = extendee; + message.Extensions.Add(field); + } + ctx.AbortState = AbortState.None; + } + + private IMessage message; + private string extendee; + + public DummyExtensions(string extendee, IMessage message) + { + this.extendee = extendee; + this.message = 
message; + } + } + } + + internal interface IMessage : IHazNames + { + int MaxField { get; } + List Types { get; } + List Extensions { get; } + List Fields { get; } + } + + partial class ServiceDescriptorProto : ISchemaObject + { + internal static bool TryParse(ParserContext ctx, out ServiceDescriptorProto obj) + { + var name = ctx.Tokens.Consume(TokenType.AlphaNumeric); + if (ctx.TryReadObject(out obj)) + { + obj.Name = name; + return true; + } + return false; + } + void ISchemaObject.ReadOne(ParserContext ctx) + { + ctx.AbortState = AbortState.Statement; + var tokens = ctx.Tokens; + + if (tokens.ConsumeIf(TokenType.AlphaNumeric, "option")) + { + Options = ctx.ParseOptionStatement(Options, this); + } + else + { + // is a method + Methods.Add(MethodDescriptorProto.Parse(ctx)); + } + ctx.AbortState = AbortState.None; + } + } + + partial class MethodDescriptorProto : ISchemaObject + { + internal Token InputTypeToken { get; set; } + internal Token OutputTypeToken { get; set; } + + internal static MethodDescriptorProto Parse(ParserContext ctx) + { + var tokens = ctx.Tokens; + tokens.Consume(TokenType.AlphaNumeric, "rpc"); + var name = tokens.Consume(TokenType.AlphaNumeric); + tokens.Consume(TokenType.Symbol, "("); + bool isInputStream = tokens.ConsumeIf(TokenType.AlphaNumeric, "stream"); + var inputTypeToken = tokens.Read(); + var inputType = tokens.Consume(TokenType.AlphaNumeric); + tokens.Consume(TokenType.Symbol, ")"); + tokens.Consume(TokenType.AlphaNumeric, "returns"); + tokens.Consume(TokenType.Symbol, "("); + bool isOutputStream = tokens.ConsumeIf(TokenType.AlphaNumeric, "stream"); + var outputTypeToken = tokens.Read(); + var outputType = tokens.Consume(TokenType.AlphaNumeric); + tokens.Consume(TokenType.Symbol, ")"); + + var method = new MethodDescriptorProto + { + Name = name, + InputType = inputType, + OutputType = outputType, + InputTypeToken = inputTypeToken, + OutputTypeToken = outputTypeToken + }; + if (isInputStream) method.ClientStreaming = true; + if (isOutputStream) method.ServerStreaming = true; + Token token; + if (tokens.Peek(out token) && token.Is(TokenType.Symbol, "{")) + { + ctx.AbortState = AbortState.Object; + ctx.TryReadObjectImpl(method); + } + else + { + tokens.Consume(TokenType.Symbol, ";"); + } + return method; + } + + void ISchemaObject.ReadOne(ParserContext ctx) + { + ctx.Tokens.Consume(TokenType.AlphaNumeric, "option"); + Options = ctx.ParseOptionStatement(Options, this); + } + } + + partial class EnumValueDescriptorProto + { + internal static EnumValueDescriptorProto Parse(ParserContext ctx) + { + var tokens = ctx.Tokens; + string name = tokens.Consume(TokenType.AlphaNumeric); + tokens.Consume(TokenType.Symbol, "="); + var value = tokens.ConsumeInt32(); + + var obj = new EnumValueDescriptorProto { Name = name, Number = value }; + if (tokens.ConsumeIf(TokenType.Symbol, "[")) + { + obj.Options = ctx.ParseOptionBlock(obj.Options); + } + tokens.Consume(TokenType.Symbol, ";"); + return obj; + } + internal EnumDescriptorProto Parent { get; set; } + + } + partial class MessageOptions : ISchemaOptions + { + string ISchemaOptions.Extendee => FileDescriptorSet.Namespace + nameof(MessageOptions); + bool ISchemaOptions.ReadOne(ParserContext ctx, string key) + { + switch (key) + { + case "map_entry": + MapEntry = ctx.Tokens.ConsumeBoolean(); + ctx.Errors.Error(ctx.Tokens.Previous, "'map_entry' should not be set explicitly; use 'map' instead"); + return true; + case "message_set_wire_format": MessageSetWireFormat = ctx.Tokens.ConsumeBoolean(); return true; + case 
"no_standard_descriptor_accessor": NoStandardDescriptorAccessor = ctx.Tokens.ConsumeBoolean(); return true; + default: return false; + } + } + public byte[] ExtensionData + { + get { return DescriptorProto.GetExtensionData(this); } + set { DescriptorProto.SetExtensionData(this, value); } + } + } + partial class MethodOptions : ISchemaOptions + { + string ISchemaOptions.Extendee => FileDescriptorSet.Namespace + nameof(MethodOptions); + bool ISchemaOptions.ReadOne(ParserContext ctx, string key) + { + switch (key) + { + case "idempotency_level": idempotency_level = ctx.Tokens.ConsumeEnum(); return true; + default: return false; + } + } + public byte[] ExtensionData + { + get { return DescriptorProto.GetExtensionData(this); } + set { DescriptorProto.SetExtensionData(this, value); } + } + } + partial class ServiceOptions : ISchemaOptions + { + string ISchemaOptions.Extendee => FileDescriptorSet.Namespace + nameof(ServiceOptions); + bool ISchemaOptions.ReadOne(ParserContext ctx, string key) => false; + + public byte[] ExtensionData + { + get { return DescriptorProto.GetExtensionData(this); } + set { DescriptorProto.SetExtensionData(this, value); } + } + } + + partial class UninterpretedOption + { + partial class NamePart + { + public override string ToString() => IsExtension ? ("(" + name_part + ")") : name_part; + internal Token Token { get; set; } + } + internal bool Applied { get; set; } + internal Token Token { get; set; } + } + partial class EnumOptions : ISchemaOptions + { + string ISchemaOptions.Extendee => FileDescriptorSet.Namespace + nameof(EnumOptions); + bool ISchemaOptions.ReadOne(ParserContext ctx, string key) + { + switch (key) + { + case "allow_alias": AllowAlias = ctx.Tokens.ConsumeBoolean(); return true; + default: return false; + } + } + public byte[] ExtensionData + { + get { return DescriptorProto.GetExtensionData(this); } + set { DescriptorProto.SetExtensionData(this, value); } + } + } + partial class EnumValueOptions : ISchemaOptions + { + string ISchemaOptions.Extendee => FileDescriptorSet.Namespace + nameof(EnumValueOptions); + bool ISchemaOptions.ReadOne(ParserContext ctx, string key) => false; + + public byte[] ExtensionData + { + get { return DescriptorProto.GetExtensionData(this); } + set { DescriptorProto.SetExtensionData(this, value); } + } + } + partial class FieldOptions : ISchemaOptions + { + string ISchemaOptions.Extendee => FileDescriptorSet.Namespace + nameof(FieldOptions); + bool ISchemaOptions.ReadOne(ParserContext ctx, string key) + { + switch (key) + { + case "jstype": Jstype = ctx.Tokens.ConsumeEnum(); return true; + case "ctype": Ctype = ctx.Tokens.ConsumeEnum(); return true; + case "lazy": Lazy = ctx.Tokens.ConsumeBoolean(); return true; + case "packed": Packed = ctx.Tokens.ConsumeBoolean(); return true; + case "weak": Weak = ctx.Tokens.ConsumeBoolean(); return true; + default: return false; + } + } + + public byte[] ExtensionData + { + get { return DescriptorProto.GetExtensionData(this); } + set { DescriptorProto.SetExtensionData(this, value); } + } + } + partial class FileOptions : ISchemaOptions + { + string ISchemaOptions.Extendee => FileDescriptorSet.Namespace + nameof(FileOptions); + bool ISchemaOptions.ReadOne(ParserContext ctx, string key) + { + switch (key) + { + case "optimize_for": OptimizeFor = ctx.Tokens.ConsumeEnum(); return true; + case "cc_enable_arenas": CcEnableArenas = ctx.Tokens.ConsumeBoolean(); return true; + case "cc_generic_services": CcGenericServices = ctx.Tokens.ConsumeBoolean(); return true; +#pragma warning disable 0612 + 
case "java_generate_equals_and_hash": JavaGenerateEqualsAndHash = ctx.Tokens.ConsumeBoolean(); return true; +#pragma warning restore 0612 + case "java_generic_services": JavaGenericServices = ctx.Tokens.ConsumeBoolean(); return true; + case "java_multiple_files": JavaMultipleFiles = ctx.Tokens.ConsumeBoolean(); return true; + case "java_string_check_utf8": JavaStringCheckUtf8 = ctx.Tokens.ConsumeBoolean(); return true; + case "py_generic_services": PyGenericServices = ctx.Tokens.ConsumeBoolean(); return true; + + case "csharp_namespace": CsharpNamespace = ctx.Tokens.ConsumeString(); return true; + case "go_package": GoPackage = ctx.Tokens.ConsumeString(); return true; + case "java_outer_classname": JavaOuterClassname = ctx.Tokens.ConsumeString(); return true; + case "java_package": JavaPackage = ctx.Tokens.ConsumeString(); return true; + case "objc_class_prefix": ObjcClassPrefix = ctx.Tokens.ConsumeString(); return true; + case "php_class_prefix": PhpClassPrefix = ctx.Tokens.ConsumeString(); return true; + case "swift_prefix": SwiftPrefix = ctx.Tokens.ConsumeString(); return true; + + default: return false; + } + } + public byte[] ExtensionData + { + get { return DescriptorProto.GetExtensionData(this); } + set { DescriptorProto.SetExtensionData(this, value); } + } + } + +#pragma warning restore CS1591 +} +namespace ProtoBuf.Reflection +{ + internal static class ErrorExtensions + { + public static void Warn(this List errors, Token token, string message) + => errors.Add(new Error(token, message, false)); + public static void Error(this List errors, Token token, string message) + => errors.Add(new Error(token, message, true)); + public static void Error(this List errors, ParserException ex) + => errors.Add(new Error(ex)); + } + + /// + /// Describes a generated file + /// + public class CodeFile + { + /// + /// Get a string representation of this instance + /// + /// + public override string ToString() => Name; + /// + /// Create a new CodeFile instance + /// + public CodeFile(string name, string text) + { + Name = name; + Text = text; + } + /// + /// The name (including path if necessary) of this file + /// + public string Name { get; } + /// + /// The contents of this file + /// + public string Text { get; } + } + + /// + /// Represents the overall result of a compilation process + /// + public class CompilerResult + { + internal CompilerResult(Error[] errors, CodeFile[] files) + { + Errors = errors; + Files = files; + } + /// + /// The errors from this execution + /// + public Error[] Errors { get; } + /// + /// The output files from this execution + /// + public CodeFile[] Files { get; } + } + + internal class Import + { + public override string ToString() => Path; + public string Path { get; set; } + public bool IsPublic { get; set; } + public Token Token { get; set; } + public bool Used { get; set; } + } + /// + /// Describes an error that occurred during processing + /// + public class Error + { + /// + /// Parse an error from a PROTOC error message + /// + public static Error[] Parse(string stdout, string stderr) + { + if (string.IsNullOrWhiteSpace(stdout) && string.IsNullOrWhiteSpace(stderr)) + return noErrors; + + List errors = new List(); + using (var reader = new StringReader(stdout)) + { + Add(reader, errors); + } + using (var reader = new StringReader(stderr)) + { + Add(reader, errors); + } + return errors.ToArray(); + } + static void Add(TextReader lines, List errors) + { + string line; + while ((line = lines.ReadLine()) != null) + { + var s = line; + bool isError = true; + int 
lineNumber = 1, columnNumber = 1; + if (s[0] == '[') + { + int i = s.IndexOf(']'); + if (i > 0) + { + var prefix = line.Substring(1, i).Trim(); + s = line.Substring(i + 1).Trim(); + if (prefix.IndexOf("WARNING", StringComparison.OrdinalIgnoreCase) >= 0 + && prefix.IndexOf("ERROR", StringComparison.OrdinalIgnoreCase) < 0) + { + isError = false; + } + } + } + var match = Regex.Match(s, @"^([^:]+):([0-9]+):([0-9]+):\s+"); + string file = ""; + if (match.Success) + { + file = match.Groups[1].Value; + if (!int.TryParse(match.Groups[2].Value, NumberStyles.Integer, CultureInfo.InvariantCulture, out lineNumber)) + lineNumber = 1; + if (!int.TryParse(match.Groups[3].Value, NumberStyles.Integer, CultureInfo.InvariantCulture, out columnNumber)) + columnNumber = 1; + s = s.Substring(match.Length).Trim(); + } + errors.Add(new Error(new Token(" ", lineNumber, columnNumber, TokenType.None, "", 0, file), s, isError)); + } + } + internal string ToString(bool includeType) => Text.Length == 0 + ? $"{File}({LineNumber},{ColumnNumber}): {(includeType ? (IsError ? "error: " : "warning: ") : "")}{Message}" + : $"{File}({LineNumber},{ColumnNumber},{LineNumber},{ColumnNumber + Text.Length}): {(includeType ? (IsError ? "error: " : "warning: ") : "")}{Message}"; + /// + /// Get a text representation of this instance + /// + /// + public override string ToString() => ToString(true); + + internal static Error[] GetArray(List errors) + => errors.Count == 0 ? noErrors : errors.ToArray(); + + private static readonly Error[] noErrors = new Error[0]; + + internal Error(Token token, string message, bool isError) + { + ColumnNumber = token.ColumnNumber; + LineNumber = token.LineNumber; + File = token.File; + LineContents = token.LineContents; + Message = message; + IsError = isError; + Text = token.Value; + } + internal Error(ParserException ex) + { + ColumnNumber = ex.ColumnNumber; + LineNumber = ex.LineNumber; + File = ex.File; + LineContents = ex.LineContents; + Message = ex.Message; + IsError = ex.IsError; + Text = ex.Text ?? 
""; + } + /// + /// True if this instance represents a non-fatal warning + /// + public bool IsWarning => !IsError; + /// + /// True if this instance represents a fatal error + /// + public bool IsError { get; } + /// + /// The file in which this error was identified + /// + public string File { get; } + /// + /// The source text relating to this error + /// + public string Text { get; } + /// + /// The error message + /// + public string Message { get; } + /// + /// The entire line contents in the source in which this error was located + /// + public string LineContents { get; } + /// + /// The line number in which this error was located + /// + public int LineNumber { get; } + /// + /// The column number in which this error was located + /// + public int ColumnNumber { get; } + } + enum AbortState + { + None, Statement, Object + } + interface ISchemaOptions + { + List UninterpretedOptions { get; } + bool Deprecated { get; set; } + bool ReadOne(ParserContext ctx, string key); + byte[] ExtensionData { get; set; } + string Extendee { get; } + } + + interface IHazNames + { + IEnumerable GetNames(); + } + + interface ISchemaObject + { + void ReadOne(ParserContext ctx); + } + internal class ParserContext : IDisposable + { + public AbortState AbortState { get; set; } + private void ReadOne(T obj) where T : class, ISchemaObject + { + AbortState oldState = AbortState; + AbortState = AbortState.None; + Token stateBefore; + if (!Tokens.Peek(out stateBefore)) return; + + try + { + obj.ReadOne(this); + } + catch (ParserException ex) + { + Errors.Error(ex); + } + finally + { + var state = AbortState; + Token stateAfter; + if (Tokens.Peek(out stateAfter) && stateBefore == stateAfter) + { + // we didn't move! avoid looping forever failing to do the same thing + Errors.Error(stateAfter, "unknown error"); + state = stateAfter.Is(TokenType.Symbol, "}") + ? AbortState.Object : AbortState.Statement; + } + AbortState = oldState; + switch (state) + { + case AbortState.Object: + Tokens.SkipToEndObject(); + break; + case AbortState.Statement: + Tokens.SkipToEndStatement(); + break; + } + } + } + public void Fill(T obj) where T : class, ISchemaObject + { + var tokens = Tokens; + Token token; + while (tokens.Peek(out token)) + { + if (tokens.ConsumeIf(TokenType.Symbol, ";")) + { } + else + { + ReadOne(obj); + } + } + } + static readonly char[] Period = { '.' }; + private void ReadOption(ref T obj, ISchemaObject parent, List existingNameParts = null) where T : class, ISchemaOptions, new() + { + var tokens = Tokens; + bool isBlock = existingNameParts != null; + var nameParts = isBlock + ? new List(existingNameParts) // create a clone we can append to + : new List(); + + do + { + if (nameParts.Count != 0) tokens.ConsumeIf(TokenType.AlphaNumeric, "."); + + bool isExtension = tokens.ConsumeIf(TokenType.Symbol, isBlock ? "[" : "("); + string key = tokens.Consume(TokenType.AlphaNumeric); + var keyToken = tokens.Previous; + if (isExtension) tokens.Consume(TokenType.Symbol, isBlock ? 
"]" : ")"); + + if (!isExtension && key.StartsWith(".")) + { + key = key.TrimStart(Period); + } + + key = key.Trim(); + if (isExtension || nameParts.Count == 0 || key.IndexOf('.') < 0) + { + var name = new UninterpretedOption.NamePart { IsExtension = isExtension, name_part = key, Token = keyToken }; + nameParts.Add(name); + } + else + { + foreach (var part in key.Split(Period, StringSplitOptions.RemoveEmptyEntries)) + { + var name = new UninterpretedOption.NamePart { IsExtension = false, name_part = part, Token = keyToken }; + nameParts.Add(name); + } + } + } while (!( + (isBlock && tokens.Is(TokenType.Symbol, "{")) + || tokens.ConsumeIf(TokenType.Symbol, isBlock ? ":" : "="))); + + if (tokens.ConsumeIf(TokenType.Symbol, "{")) + { + if (obj == null) obj = new T(); + bool any = false; + while (!tokens.ConsumeIf(TokenType.Symbol, "}")) + { + ReadOption(ref obj, parent, nameParts); + any = true; + } + if (!any) + { + var newOption = new UninterpretedOption(); + newOption.Names.AddRange(nameParts); + obj.UninterpretedOptions.Add(newOption); + } + } + else + { + + var field = parent as FieldDescriptorProto; + bool isField = typeof(T) == typeof(FieldOptions) && field != null; + var singleKey = (nameParts.Count == 1 && !nameParts[0].IsExtension) ? nameParts[0].name_part : null; + if (singleKey == "default" && isField) + { + string defaultValue = tokens.ConsumeString(field.type == FieldDescriptorProto.Type.TypeBytes); + nameParts[0].Token.RequireProto2(this); + ParseDefault(tokens.Previous, field.type, ref defaultValue); + if (defaultValue != null) + { + field.DefaultValue = defaultValue; + } + } + else if (singleKey == "json_name" && isField) + { + string jsonName = tokens.ConsumeString(); + field.JsonName = jsonName; + } + else + { + if (obj == null) obj = new T(); + if (singleKey == "deprecated") + { + obj.Deprecated = tokens.ConsumeBoolean(); + } + else if (singleKey == null || !obj.ReadOne(this, singleKey)) + { + var newOption = new UninterpretedOption + { + AggregateValue = tokens.ConsumeString(), + Token = tokens.Previous + }; + newOption.Names.AddRange(nameParts); + obj.UninterpretedOptions.Add(newOption); + } + } + } + } + + private void ParseDefault(Token token, FieldDescriptorProto.Type type, ref string defaultValue) + { + switch (type) + { + case FieldDescriptorProto.Type.TypeBool: + switch (defaultValue) + { + case "true": + case "false": + break; + default: + Errors.Error(token, "expected 'true' or 'false'"); + break; + } + break; + case FieldDescriptorProto.Type.TypeDouble: + switch (defaultValue) + { + case "inf": + case "-inf": + case "nan": + break; + default: + double val; + if (TokenExtensions.TryParseDouble(defaultValue, out val)) + { + defaultValue = Format(val); + } + else + { + Errors.Error(token, "invalid floating-point number"); + } + break; + } + break; + case FieldDescriptorProto.Type.TypeFloat: + switch (defaultValue) + { + case "inf": + case "-inf": + case "nan": + break; + default: + float val; + if (TokenExtensions.TryParseSingle(defaultValue, out val)) + { + defaultValue = Format(val); + } + else + { + Errors.Error(token, "invalid floating-point number"); + } + break; + } + break; + case FieldDescriptorProto.Type.TypeSfixed32: + case FieldDescriptorProto.Type.TypeInt32: + case FieldDescriptorProto.Type.TypeSint32: + { + int val; + if (TokenExtensions.TryParseInt32(defaultValue, out val)) + { + defaultValue = val.ToString(CultureInfo.InvariantCulture); + } + else + { + Errors.Error(token, "invalid integer"); + } + } + break; + case 
FieldDescriptorProto.Type.TypeFixed32: + case FieldDescriptorProto.Type.TypeUint32: + { + uint val; + if (TokenExtensions.TryParseUInt32(defaultValue, out val)) + { + defaultValue = val.ToString(CultureInfo.InvariantCulture); + } + else + { + Errors.Error(token, "invalid unsigned integer"); + } + } + break; + case FieldDescriptorProto.Type.TypeSfixed64: + case FieldDescriptorProto.Type.TypeInt64: + case FieldDescriptorProto.Type.TypeSint64: + { + long val; + if (TokenExtensions.TryParseInt64(defaultValue, out val)) + { + defaultValue = val.ToString(CultureInfo.InvariantCulture); + } + else + { + Errors.Error(token, "invalid integer"); + } + } + break; + case FieldDescriptorProto.Type.TypeFixed64: + case FieldDescriptorProto.Type.TypeUint64: + { + ulong val; + if (TokenExtensions.TryParseUInt64(defaultValue, out val)) + { + defaultValue = val.ToString(CultureInfo.InvariantCulture); + } + else + { + Errors.Error(token, "invalid unsigned integer"); + } + } + break; + case 0: + case FieldDescriptorProto.Type.TypeBytes: + case FieldDescriptorProto.Type.TypeString: + case FieldDescriptorProto.Type.TypeEnum: + break; + default: + Errors.Error(token, $"default value not handled: {type}={defaultValue}"); + break; + } + } + + static readonly char[] ExponentChars = { 'e', 'E' }; + static readonly string[] ExponentFormats = { "e0", "e1", "e2", "e3", "e4", "e5", "e6", "e7", "e8", "e9", "e10" }; + static string Format(float val) + { + string s = val.ToString(CultureInfo.InvariantCulture); + if (s.IndexOfAny(ExponentChars) < 0) return s; + + foreach (var format in ExponentFormats) + { + var tmp = val.ToString(format, CultureInfo.InvariantCulture); + float x; + if (float.TryParse(tmp, NumberStyles.Any, CultureInfo.InvariantCulture, out x) && x == val) return tmp; + } + return val.ToString("e", CultureInfo.InvariantCulture); + + } + + static string Format(double val) + { + string s = val.ToString(CultureInfo.InvariantCulture).ToUpperInvariant(); + if (s.IndexOfAny(ExponentChars) < 0) return s; + + foreach (var format in ExponentFormats) + { + var tmp = val.ToString(format, CultureInfo.InvariantCulture); + double x; + if (double.TryParse(tmp, NumberStyles.Any, CultureInfo.InvariantCulture, out x) && x == val) return tmp; + } + return val.ToString("e", CultureInfo.InvariantCulture); + } + + public T ParseOptionBlock(T obj, ISchemaObject parent = null) where T : class, ISchemaOptions, new() + { + var tokens = Tokens; + try + { + while (true) + { + if (tokens.ConsumeIf(TokenType.Symbol, "]")) + { + break; + } + else if (tokens.ConsumeIf(TokenType.Symbol, ",")) + { + } + else + { + ReadOption(ref obj, parent); + } + } + } + catch (ParserException ex) + { + Errors.Error(ex); + tokens.SkipToEndOptions(); + } + return obj; + } + public T ParseOptionStatement(T obj, ISchemaObject parent) where T : class, ISchemaOptions, new() + { + var tokens = Tokens; + try + { + ReadOption(ref obj, parent); + tokens.Consume(TokenType.Symbol, ";"); + } + catch (ParserException ex) + { + Errors.Error(ex); + tokens.SkipToEndStatement(); + } + return obj; + } + public bool TryReadObject(out T obj) where T : class, ISchemaObject, new() + { + obj = new T(); + return TryReadObjectImpl(obj); + } + internal bool TryReadObjectImpl(T obj) where T : class, ISchemaObject + { + var tokens = Tokens; + + try + { + tokens.Consume(TokenType.Symbol, "{"); + Token token; + while (tokens.Peek(out token) && !token.Is(TokenType.Symbol, "}")) + { + if (tokens.ConsumeIf(TokenType.Symbol, ";")) + { } + else + { + ReadOne(obj); + } + } + 
tokens.Consume(TokenType.Symbol, "}"); + return true; + } + catch (ParserException ex) + { + Errors.Error(ex); + tokens.SkipToEndObject(); + } + obj = null; + return false; + } + public ParserContext(FileDescriptorProto file, Peekable tokens, List errors) + { + Tokens = tokens; + Errors = errors; + _file = file; + } + + public string Syntax + { + get + { + var syntax = _file.Syntax; + return string.IsNullOrEmpty(syntax) ? FileDescriptorProto.SyntaxProto2 : syntax; + } + } + + private readonly FileDescriptorProto _file; + public Peekable Tokens { get; } + public List Errors { get; } + + public void Dispose() { Tokens?.Dispose(); } + + internal void CheckNames(IHazNames parent, string name, Token token +#if DEBUG && NETSTANDARD1_3 + , [System.Runtime.CompilerServices.CallerMemberName] string caller = null +#endif + ) + { + if (parent != null && parent.GetNames().Contains(name)) + { + Errors.Error(token, $"name '{name}' is already in use" +#if DEBUG && NETSTANDARD1_3 + + $" ({caller})" +#endif + ); + } + } + } +} diff --git a/Editor/protobuf-net.Reflection/Parsers.cs.meta b/Editor/protobuf-net.Reflection/Parsers.cs.meta new file mode 100644 index 0000000..e62bd49 --- /dev/null +++ b/Editor/protobuf-net.Reflection/Parsers.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: e55e7fdae8bab4a52bbc61d0bb88cb6f +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/protobuf-net.Reflection/Peekable.cs b/Editor/protobuf-net.Reflection/Peekable.cs new file mode 100644 index 0000000..87341e2 --- /dev/null +++ b/Editor/protobuf-net.Reflection/Peekable.cs @@ -0,0 +1,55 @@ +using System; +using System.Collections.Generic; + +namespace ProtoBuf.Reflection +{ + internal sealed class Peekable : IDisposable + { + public override string ToString() + { + T val; + return Peek(out val) ? (val?.ToString() ?? 
"(null)") : "(EOF)"; + } + private readonly IEnumerator _iter; + private T _peek, _prev; + private bool _havePeek, _eof; + public Peekable(IEnumerable sequence) + { + _iter = sequence.GetEnumerator(); + } + public T Previous => _prev; + public bool Consume() + { + T val; + bool haveData = _havePeek || Peek(out val); + _prev = _peek; + _havePeek = false; + return haveData; + } + public bool Peek(out T next) + { + if (!_havePeek) + { + if (_iter.MoveNext()) + { + _prev = _peek; + _peek = _iter.Current; + _havePeek = true; + } + else + { + _eof = true; + _havePeek = false; + } + } + if (_eof) + { + next = default(T); + return false; + } + next = _peek; + return true; + } + public void Dispose() => _iter?.Dispose(); + } +} diff --git a/Editor/protobuf-net.Reflection/Peekable.cs.meta b/Editor/protobuf-net.Reflection/Peekable.cs.meta new file mode 100644 index 0000000..5422d06 --- /dev/null +++ b/Editor/protobuf-net.Reflection/Peekable.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 7a3c973520c394370a13670e5eff7ae2 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/protobuf-net.Reflection/Token.cs b/Editor/protobuf-net.Reflection/Token.cs new file mode 100644 index 0000000..bc34d04 --- /dev/null +++ b/Editor/protobuf-net.Reflection/Token.cs @@ -0,0 +1,83 @@ +using Google.Protobuf.Reflection; +using System; + +namespace ProtoBuf.Reflection +{ + internal struct Token + { + + public static bool operator ==(Token x, Token y) + { + return x.Offset == y.Offset && x.File == y.File; + } + public static bool operator !=(Token x, Token y) + { + return x.Offset != y.Offset || x.File != y.File; + } + public override int GetHashCode() => Offset; + public override bool Equals(object obj) => (obj is Token) && ((Token)obj).Offset == this.Offset; + public bool Equals(Token token) => token.Offset == this.Offset; + public int Offset { get; } + public int LineNumber { get; } + public string File { get; } + public int ColumnNumber { get; } + public TokenType Type { get; } + public string Value { get; } + public string LineContents { get; } + internal Token(string value, int lineNumber, int columnNumber, TokenType type, string lineContents, int offset, string file) + { + Value = value; + LineNumber = lineNumber; + ColumnNumber = columnNumber; + File = file; + Type = type; + LineContents = lineContents; + Offset = offset; + } + public override string ToString() => $"({LineNumber},{ColumnNumber}) '{Value}'"; + + + internal Exception Throw(string error = null, bool isError = true) + { + throw new ParserException(this, string.IsNullOrWhiteSpace(error) ? 
$"syntax error: '{Value}'" : error, isError); + } + + internal void Assert(TokenType type, string value = null) + { + if (value != null) + { + if (type != Type || value != Value) + { + Throw($"expected {type} '{value}'"); + } + + } + else + { + if (type != Type) + { + Throw($"expected {type}"); + } + } + } + + internal bool Is(TokenType type, string value = null) + { + if (type != Type) return false; + if (value != null && value != Value) return false; + return true; + } + + internal void RequireProto2(ParserContext ctx) + { + if(ctx.Syntax != FileDescriptorProto.SyntaxProto2) + { + var msg = "'" + Value + "' requires " + FileDescriptorProto.SyntaxProto2 + " syntax"; + ctx.Errors.Error(this, msg); + } + } + + internal Error TypeNotFound(string typeName = null) => new Error(this, + $"type not found: '{(string.IsNullOrWhiteSpace(typeName) ? Value : typeName)}'", true); + } +} diff --git a/Editor/protobuf-net.Reflection/Token.cs.meta b/Editor/protobuf-net.Reflection/Token.cs.meta new file mode 100644 index 0000000..b4ca2ad --- /dev/null +++ b/Editor/protobuf-net.Reflection/Token.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: aea55379c2cfd4ea8a17e19afd57938c +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/protobuf-net.Reflection/TokenExtensions.cs b/Editor/protobuf-net.Reflection/TokenExtensions.cs new file mode 100644 index 0000000..d11fc66 --- /dev/null +++ b/Editor/protobuf-net.Reflection/TokenExtensions.cs @@ -0,0 +1,642 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Text; + +namespace ProtoBuf.Reflection +{ + internal static class TokenExtensions + { + public static bool Is(this Peekable tokens, TokenType type, string value = null) + { + Token val; + return tokens.Peek(out val) && val.Is(type, value); + } + + public static void Consume(this Peekable tokens, TokenType type, string value) + { + var token = tokens.Read(); + token.Assert(type, value); + tokens.Consume(); + } + public static bool ConsumeIf(this Peekable tokens, TokenType type, string value) + { + Token token; + if (tokens.Peek(out token) && token.Is(type, value)) + { + tokens.Consume(); + return true; + } + return false; + } + + public static Token Read(this Peekable tokens) + { + Token val; + if (!tokens.Peek(out val)) + { + throw new ParserException(tokens.Previous, "Unexpected end of file", true); + } + return val; + } + public static bool SkipToEndOptions(this Peekable tokens) + { + Token token; + while (tokens.Peek(out token)) + { + if (token.Is(TokenType.Symbol, ";") || token.Is(TokenType.Symbol, "}")) + return true; // but don't consume + + tokens.Consume(); + if (token.Is(TokenType.Symbol, "]")) + return true; + } + return false; + } + public static bool SkipToEndStatement(this Peekable tokens) + { + Token token; + while (tokens.Peek(out token)) + { + if (token.Is(TokenType.Symbol, "}")) + return true; // but don't consume + + tokens.Consume(); + if (token.Is(TokenType.Symbol, ";")) + return true; + } + return false; + } + public static bool SkipToEndObject(this Peekable tokens) => SkipToSymbol(tokens, "}"); + private static bool SkipToSymbol(this Peekable tokens, string symbol) + { + Token token; + while (tokens.Peek(out token)) + { + tokens.Consume(); + if (token.Is(TokenType.Symbol, symbol)) + return true; + } + return false; + } + public static 
bool SkipToEndStatementOrObject(this Peekable tokens) + { + Token token; + while (tokens.Peek(out token)) + { + tokens.Consume(); + if (token.Is(TokenType.Symbol, "}") || token.Is(TokenType.Symbol, ";")) + return true; + } + return false; + } + public static string Consume(this Peekable tokens, TokenType type) + { + var token = tokens.Read(); + token.Assert(type); + string s = token.Value; + tokens.Consume(); + return s; + } + + static class EnumCache + { + private static readonly Dictionary lookup; + public static bool TryGet(string name, out T value) => lookup.TryGetValue(name, out value); + static EnumCache() + { + var fields = typeof(T).GetFields(BindingFlags.Static | BindingFlags.Public); + var tmp = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var field in fields) + { + string name = field.Name; + var attrib = (ProtoEnumAttribute)field.GetCustomAttributes(false).FirstOrDefault(); + if (!string.IsNullOrWhiteSpace(attrib?.Name)) name = attrib.Name; + var val = (T)field.GetValue(null); + tmp.Add(name, val); + } + lookup = tmp; + } + } + internal static T ConsumeEnum(this Peekable tokens, bool ignoreCase = true) where T : struct + { + var token = tokens.Read(); + var value = tokens.ConsumeString(); + + T val; + if (!EnumCache.TryGet(token.Value, out val)) + token.Throw("Unable to parse " + typeof(T).Name); + return val; + } + internal static bool TryParseUInt32(string token, out uint val, uint? max = null) + { + if (max.HasValue && token == "max") + { + val = max.GetValueOrDefault(); + return true; + } + + if (token.StartsWith("0x", StringComparison.OrdinalIgnoreCase) && uint.TryParse(token.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out val)) + { + return true; + } + + return uint.TryParse(token, NumberStyles.Integer | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out val); + } + internal static bool TryParseUInt64(string token, out ulong val, ulong? max = null) + { + if (max.HasValue && token == "max") + { + val = max.GetValueOrDefault(); + return true; + } + + if (token.StartsWith("0x", StringComparison.OrdinalIgnoreCase) && ulong.TryParse(token.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out val)) + { + return true; + } + + return ulong.TryParse(token, NumberStyles.Integer | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out val); + } + internal static bool TryParseInt32(string token, out int val, int? max = null) + { + if (max.HasValue && token == "max") + { + val = max.GetValueOrDefault(); + return true; + } + + if (token.StartsWith("-0x", StringComparison.OrdinalIgnoreCase) && int.TryParse(token.Substring(3), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out val)) + { + val = -val; + return true; + } + + if (token.StartsWith("0x", StringComparison.OrdinalIgnoreCase) && int.TryParse(token.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out val)) + { + return true; + } + + return int.TryParse(token, NumberStyles.Integer | NumberStyles.AllowLeadingSign | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out val); + } + internal static bool TryParseInt64(string token, out long val, long? 
max = null) + { + if (max.HasValue && token == "max") + { + val = max.GetValueOrDefault(); + return true; + } + + if (token.StartsWith("-0x", StringComparison.OrdinalIgnoreCase) && long.TryParse(token.Substring(3), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out val)) + { + val = -val; + return true; + } + + if (token.StartsWith("0x", StringComparison.OrdinalIgnoreCase) && long.TryParse(token.Substring(2), NumberStyles.AllowHexSpecifier, CultureInfo.InvariantCulture, out val)) + { + return true; + } + + return long.TryParse(token, NumberStyles.Integer | NumberStyles.AllowLeadingSign | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out val); + } + internal static int ConsumeInt32(this Peekable tokens, int? max = null) + { + var token = tokens.Read(); + token.Assert(TokenType.AlphaNumeric); + tokens.Consume(); + int val; + if (TryParseInt32(token.Value, out val, max)) return val; + throw token.Throw("Unable to parse integer"); + } + + internal static string ConsumeString(this Peekable tokens, bool asBytes = false) + { + var token = tokens.Read(); + switch (token.Type) + { + case TokenType.StringLiteral: + MemoryStream ms = null; + do + { + ReadStringBytes(ref ms, token.Value); + tokens.Consume(); + } while (tokens.Peek(out token) && token.Type == TokenType.StringLiteral); // literal concat is a thing + if (ms == null) return ""; + + if (!asBytes) + { +#if NETSTANDARD1_3 + string s = ms.TryGetBuffer(out var segment) + ? Encoding.UTF8.GetString(segment.Array, segment.Offset, segment.Count) + : Encoding.UTF8.GetString(ms.ToArray()); + +#else + string s = Encoding.UTF8.GetString(ms.GetBuffer(), 0, (int)ms.Length); +#endif + return s.Replace("\\", @"\\") + .Replace("\'", @"\'") + .Replace("\"", @"\""") + .Replace("\r", @"\r") + .Replace("\n", @"\n") + .Replace("\t", @"\t"); + } + + var sb = new StringBuilder((int)ms.Length); + int b; + ms.Position = 0; + while ((b = ms.ReadByte()) >= 0) + { + switch (b) + { + case '\n': sb.Append(@"\n"); break; + case '\r': sb.Append(@"\r"); break; + case '\t': sb.Append(@"\t"); break; + case '\'': sb.Append(@"\'"); break; + case '\"': sb.Append(@"\"""); break; + case '\\': sb.Append(@"\\"); break; + default: + if (b >= 32 && b < 127) + { + sb.Append((char)b); + } + else + { + // encode as 3-part octal + sb.Append('\\') + .Append((char)(((b >> 6) & 7) + (int)'0')) + .Append((char)(((b >> 3) & 7) + (int)'0')) + .Append((char)(((b >> 0) & 7) + (int)'0')); + } + break; + } + } + return sb.ToString(); + case TokenType.AlphaNumeric: + tokens.Consume(); + return token.Value; + default: + throw token.Throw(); + } + } + internal static void AppendAscii(MemoryStream target, string ascii) + { + foreach (char c in ascii) + target.WriteByte(checked((byte)c)); + } + internal static void AppendByte(MemoryStream target, ref uint codePoint, ref int len) + { + if (len != 0) + { + target.WriteByte(checked((byte)codePoint)); + } + codePoint = 0; + len = 0; + } + internal static unsafe void AppendNormalized(MemoryStream target, ref uint codePoint, ref int len) + { + if (len == 0) + { + codePoint = 0; + return; + } + byte* b = stackalloc byte[10]; + char c = checked((char)codePoint); + int count = Encoding.UTF8.GetBytes(&c, 1, b, 10); + for (int i = 0; i < count; i++) + { + target.WriteByte(b[i]); + } + } + internal static void AppendEscaped(MemoryStream target, char c) + { + uint codePoint; + switch (c) + { + // encoded as octal + case 'a': codePoint = '\a'; break; + case 'b': codePoint = '\b'; break; + case 'f': codePoint = '\f'; break; + case 'v': 
codePoint = '\v'; break; + case 't': codePoint = '\t'; break; + case 'n': codePoint = '\n'; break; + case 'r': codePoint = '\r'; break; + + case '\\': + case '?': + case '\'': + case '\"': + codePoint = c; + break; + default: + codePoint = '?'; + break; + } + int len = 1; + AppendNormalized(target, ref codePoint, ref len); + } + internal static bool GetHexValue(char c, out uint val, ref int len) + { + len++; + if (c >= '0' && c <= '9') + { + val = (uint)c - (uint)'0'; + return true; + } + if (c >= 'a' && c <= 'f') + { + val = 10 + (uint)c - (uint)'a'; + return true; + } + if (c >= 'A' && c <= 'F') + { + val = 10 + (uint)c - (uint)'A'; + return true; + } + len--; + val = 0; + return false; + } + // the normalized output *includes* the slashes, but expands octal to 3 places; + // it is the job of codegen to change this normalized form to the target language form + internal static void ReadStringBytes(ref MemoryStream ms, string value) + { + const int STATE_NORMAL = 0, STATE_ESCAPE = 1, STATE_OCTAL = 2, STATE_HEX = 3; + int state = STATE_NORMAL; + if (value == null || value.Length == 0) return; + + if (ms == null) ms = new MemoryStream(value.Length); + uint escapedCodePoint = 0; + int escapeLength = 0; + foreach (char c in value) + { + switch (state) + { + case STATE_ESCAPE: + if (c >= '0' && c <= '7') + { + state = STATE_OCTAL; + GetHexValue(c, out escapedCodePoint, ref escapeLength); // not a typo; all 1-char octal values are also the same in hex + } + else if (c == 'x') + { + state = STATE_HEX; + } + else if (c == 'u' || c == 'U') + { + throw new NotSupportedException("Unicode escape points: on my todo list"); + } + else + { + state = STATE_NORMAL; + AppendEscaped(ms, c); + } + break; + case STATE_OCTAL: + if (c >= '0' && c <= '7') + { + uint x; + GetHexValue(c, out x, ref escapeLength); + escapedCodePoint = (escapedCodePoint << 3) | x; + if (escapeLength == 3) + { + AppendByte(ms, ref escapedCodePoint, ref escapeLength); + state = STATE_NORMAL; + } + } + else + { + // not an octal char - regular append + if (escapeLength == 0) + { + // include the malformed \x + AppendAscii(ms, @"\x"); + } + else + { + AppendByte(ms, ref escapedCodePoint, ref escapeLength); + } + state = STATE_NORMAL; + goto case STATE_NORMAL; + } + break; + case STATE_HEX: + { + uint x; + if (GetHexValue(c, out x, ref escapeLength)) + { + escapedCodePoint = (escapedCodePoint << 4) | x; + if (escapeLength == 2) + { + AppendByte(ms, ref escapedCodePoint, ref escapeLength); + state = STATE_NORMAL; + } + } + else + { + // not a hex char - regular append + AppendByte(ms, ref escapedCodePoint, ref escapeLength); + state = STATE_NORMAL; + goto case STATE_NORMAL; + } + } + break; + case STATE_NORMAL: + if (c == '\\') + { + state = STATE_ESCAPE; + } + else + { + uint codePoint = (uint)c; + int len = 1; + AppendNormalized(ms, ref codePoint, ref len); + } + break; + default: + throw new InvalidOperationException(); + } + } + // append any trailing escaped data + AppendByte(ms, ref escapedCodePoint, ref escapeLength); + } + + internal static bool ConsumeBoolean(this Peekable tokens) + { + var token = tokens.Read(); + token.Assert(TokenType.AlphaNumeric); + tokens.Consume(); + if (string.Equals("true", token.Value, StringComparison.OrdinalIgnoreCase)) return true; + if (string.Equals("false", token.Value, StringComparison.OrdinalIgnoreCase)) return false; + throw token.Throw("Unable to parse boolean"); + } + + static TokenType Identify(char c) + { + if (c == '"' || c == '\'') return TokenType.StringLiteral; + if 
(char.IsWhiteSpace(c)) return TokenType.Whitespace; + if (char.IsLetterOrDigit(c)) return TokenType.AlphaNumeric; + switch (c) + { + case '_': + case '.': + case '-': + return TokenType.AlphaNumeric; + } + return TokenType.Symbol; + } + + public static IEnumerable RemoveCommentsAndWhitespace(this IEnumerable tokens) + { + int commentLineNumber = -1; + bool isBlockComment = false; + foreach (var token in tokens) + { + if (isBlockComment) + { + // swallow everything until the end of the block comment + if (token.Is(TokenType.Symbol, "*/")) + isBlockComment = false; + } + else if (commentLineNumber == token.LineNumber) + { + // swallow everything else on that line + } + else if (token.Is(TokenType.Whitespace)) + { + continue; + } + else if (token.Is(TokenType.Symbol, "//")) + { + commentLineNumber = token.LineNumber; + } + else if (token.Is(TokenType.Symbol, "/*")) + { + isBlockComment = true; + } + else + { + yield return token; + } + } + } + + static bool CanCombine(TokenType type, int len, char prev, char next) + => type != TokenType.Symbol + || (len == 1 && prev == '/' && (next == '/' || next == '*')) + || (len == 1 && prev == '*' && next == '/'); + + + public static IEnumerable Tokenize(this TextReader reader, string file) + { + var buffer = new StringBuilder(); + + int lineNumber = 0, offset = 0; + string line; + string lastLine = null; + while ((line = reader.ReadLine()) != null) + { + lastLine = line; + lineNumber++; + int columnNumber = 0, tokenStart = 1; + char lastChar = '\0', stringType = '\0'; + TokenType type = TokenType.None; + bool isEscaped = false; + foreach (char c in line) + { + columnNumber++; + if (type == TokenType.StringLiteral) + { + if (c == stringType && !isEscaped) + { + yield return new Token(buffer.ToString(), lineNumber, tokenStart, type, line, offset++, file); + buffer.Clear(); + type = TokenType.None; + } + else + { + buffer.Append(c); + isEscaped = !isEscaped && c == '\\'; // ends an existing escape or starts a new one + } + } + else + { + var newType = Identify(c); + if (newType == type && CanCombine(type, buffer.Length, lastChar, c)) + { + buffer.Append(c); + } + else + { + if (buffer.Length != 0) + { + yield return new Token(buffer.ToString(), lineNumber, tokenStart, type, line, offset++, file); + buffer.Clear(); + } + type = newType; + tokenStart = columnNumber; + if (newType == TokenType.StringLiteral) + { + stringType = c; + } + else + { + buffer.Append(c); + } + } + } + lastChar = c; + } + + if (buffer.Length != 0) + { + yield return new Token(buffer.ToString(), lineNumber, tokenStart, type, lastLine, offset++, file); + buffer.Clear(); + } + } + + } + internal static bool TryParseSingle(string token, out float val) + { + if (token == "nan") + { + val = float.NaN; + return true; + } + if (token == "inf") + { + val = float.PositiveInfinity; + return true; + } + if (token == "-inf") + { + val = float.NegativeInfinity; + return true; + } + return float.TryParse(token, NumberStyles.Number | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out val); + } + internal static bool TryParseDouble(string token, out double val) + { + if(token == "nan") + { + val = double.NaN; + return true; + } + if(token == "inf") + { + val = double.PositiveInfinity; + return true; + } + if(token == "-inf") + { + val = double.NegativeInfinity; + return true; + } + return double.TryParse(token, NumberStyles.Number | NumberStyles.AllowExponent, CultureInfo.InvariantCulture, out val); + } + } +} + diff --git a/Editor/protobuf-net.Reflection/TokenExtensions.cs.meta 
b/Editor/protobuf-net.Reflection/TokenExtensions.cs.meta new file mode 100644 index 0000000..6edaecc --- /dev/null +++ b/Editor/protobuf-net.Reflection/TokenExtensions.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 08776b0940a8d42c080f7880619608c4 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Editor/protobuf-net.Reflection/TokenType.cs b/Editor/protobuf-net.Reflection/TokenType.cs new file mode 100644 index 0000000..9945ff6 --- /dev/null +++ b/Editor/protobuf-net.Reflection/TokenType.cs @@ -0,0 +1,11 @@ +namespace ProtoBuf.Reflection +{ + internal enum TokenType + { + None, + Whitespace, + StringLiteral, + AlphaNumeric, + Symbol + } +} diff --git a/Editor/protobuf-net.Reflection/TokenType.cs.meta b/Editor/protobuf-net.Reflection/TokenType.cs.meta new file mode 100644 index 0000000..0f229db --- /dev/null +++ b/Editor/protobuf-net.Reflection/TokenType.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: c55473354eb3c4e338593d7b041132be +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/GuruKCP/README.md b/README.md similarity index 81% rename from Assets/GuruKCP/README.md rename to README.md index e25188c..14e5df4 100644 --- a/Assets/GuruKCP/README.md +++ b/README.md @@ -10,6 +10,10 @@ KCP is a fast reliable protocol (rudp) built on UDP that, at the cost of wasting 10%-20% more than TCP, It is mainly used to build the network layer of a Unity client. +The kcp library is based on https://github.com/l42111996/csharp-kcp +The protobuf-net library is based on https://github.com/protobuf-net/protobuf-net +Both libraries have been modified as needed so that they fit Unity seamlessly + ## Installation and Integration diff --git a/Assets/GuruKCP/README.md.meta b/README.md.meta similarity index 100% rename from Assets/GuruKCP/README.md.meta rename to README.md.meta diff --git a/Assets/GuruKCP/Runtime.meta b/Runtime.meta similarity index 100% rename from Assets/GuruKCP/Runtime.meta rename to Runtime.meta diff --git a/Assets/GuruKCP/Runtime/GuruKCP.Runtime.asmdef b/Runtime/GuruKCP.Runtime.asmdef similarity index 78% rename from Assets/GuruKCP/Runtime/GuruKCP.Runtime.asmdef rename to Runtime/GuruKCP.Runtime.asmdef index 4086920..1c70771 100644 --- a/Assets/GuruKCP/Runtime/GuruKCP.Runtime.asmdef +++ b/Runtime/GuruKCP.Runtime.asmdef @@ -3,10 +3,8 @@ "rootNamespace": "Guru", "references": [], "includePlatforms": [], - "excludePlatforms": [ - "Editor" - ], - "allowUnsafeCode": false, + "excludePlatforms": [], + "allowUnsafeCode": true, "overrideReferences": false, "precompiledReferences": [], "autoReferenced": true, diff --git a/Assets/GuruKCP/Runtime/GuruKCP.Runtime.asmdef.meta b/Runtime/GuruKCP.Runtime.asmdef.meta similarity index 100% rename from Assets/GuruKCP/Runtime/GuruKCP.Runtime.asmdef.meta rename to Runtime/GuruKCP.Runtime.asmdef.meta diff --git a/Assets/GuruKCP/Runtime/GuruKCP.cs b/Runtime/GuruKCP.cs similarity index 100% rename from Assets/GuruKCP/Runtime/GuruKCP.cs rename to Runtime/GuruKCP.cs diff --git a/Assets/GuruKCP/Runtime/GuruKCP.cs.meta b/Runtime/GuruKCP.cs.meta similarity index 100% rename from Assets/GuruKCP/Runtime/GuruKCP.cs.meta rename to Runtime/GuruKCP.cs.meta diff --git a/Runtime/Protobuf-net.meta b/Runtime/Protobuf-net.meta new file mode 100644 index 0000000..c53e343 --- /dev/null +++ b/Runtime/Protobuf-net.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 22deed51a0f6d4236a6ae1960d426139 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff 
--git a/Runtime/Protobuf-net/BclHelpers.cs b/Runtime/Protobuf-net/BclHelpers.cs new file mode 100644 index 0000000..bcff9a4 --- /dev/null +++ b/Runtime/Protobuf-net/BclHelpers.cs @@ -0,0 +1,712 @@ +using System; +using System.Reflection; +namespace ProtoBuf +{ + internal enum TimeSpanScale + { + Days = 0, + Hours = 1, + Minutes = 2, + Seconds = 3, + Milliseconds = 4, + Ticks = 5, + + MinMax = 15 + } + + /// + /// Provides support for common .NET types that do not have a direct representation + /// in protobuf, using the definitions from bcl.proto + /// + public static class BclHelpers + { + /// + /// Creates a new instance of the specified type, bypassing the constructor. + /// + /// The type to create + /// The new instance + /// If the platform does not support constructor-skipping + public static object GetUninitializedObject(Type type) + { +#if COREFX + object obj = TryGetUninitializedObjectWithFormatterServices(type); + if (obj != null) return obj; +#endif +#if PLAT_BINARYFORMATTER && !(COREFX || PROFILE259) + return System.Runtime.Serialization.FormatterServices.GetUninitializedObject(type); +#else + throw new NotSupportedException("Constructor-skipping is not supported on this platform"); +#endif + } + +#if COREFX // this is inspired by DCS: https://github.com/dotnet/corefx/blob/c02d33b18398199f6acc17d375dab154e9a1df66/src/System.Private.DataContractSerialization/src/System/Runtime/Serialization/XmlFormatReaderGenerator.cs#L854-L894 + static Func getUninitializedObject; + static internal object TryGetUninitializedObjectWithFormatterServices(Type type) + { + if (getUninitializedObject == null) + { + try { + var formatterServiceType = typeof(string).GetTypeInfo().Assembly.GetType("System.Runtime.Serialization.FormatterServices"); + if (formatterServiceType == null) + { + // fallback for .Net Core 3.0 + var formatterAssembly = Assembly.Load(new AssemblyName("System.Runtime.Serialization.Formatters")); + formatterServiceType = formatterAssembly.GetType("System.Runtime.Serialization.FormatterServices"); + } + MethodInfo method = formatterServiceType?.GetMethod("GetUninitializedObject", BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static); + if (method != null) + { + getUninitializedObject = (Func)method.CreateDelegate(typeof(Func)); + } + } + catch { /* best efforts only */ } + if(getUninitializedObject == null) getUninitializedObject = x => null; + } + return getUninitializedObject(type); + } +#endif + + const int FieldTimeSpanValue = 0x01, FieldTimeSpanScale = 0x02, FieldTimeSpanKind = 0x03; + + internal static readonly DateTime[] EpochOrigin = { + new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Unspecified), + new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc), + new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Local) + }; + + /// + /// The default value for dates that are following google.protobuf.Timestamp semantics + /// + private static readonly DateTime TimestampEpoch = EpochOrigin[(int)DateTimeKind.Utc]; + + /// + /// Writes a TimeSpan to a protobuf stream using protobuf-net's own representation, bcl.TimeSpan + /// + public static void WriteTimeSpan(TimeSpan timeSpan, ProtoWriter dest) + { + WriteTimeSpanImpl(timeSpan, dest, DateTimeKind.Unspecified); + } + + private static void WriteTimeSpanImpl(TimeSpan timeSpan, ProtoWriter dest, DateTimeKind kind) + { + if (dest == null) throw new ArgumentNullException(nameof(dest)); + long value; + switch (dest.WireType) + { + case WireType.String: + case WireType.StartGroup: + TimeSpanScale scale; + value = 
timeSpan.Ticks; + if (timeSpan == TimeSpan.MaxValue) + { + value = 1; + scale = TimeSpanScale.MinMax; + } + else if (timeSpan == TimeSpan.MinValue) + { + value = -1; + scale = TimeSpanScale.MinMax; + } + else if (value % TimeSpan.TicksPerDay == 0) + { + scale = TimeSpanScale.Days; + value /= TimeSpan.TicksPerDay; + } + else if (value % TimeSpan.TicksPerHour == 0) + { + scale = TimeSpanScale.Hours; + value /= TimeSpan.TicksPerHour; + } + else if (value % TimeSpan.TicksPerMinute == 0) + { + scale = TimeSpanScale.Minutes; + value /= TimeSpan.TicksPerMinute; + } + else if (value % TimeSpan.TicksPerSecond == 0) + { + scale = TimeSpanScale.Seconds; + value /= TimeSpan.TicksPerSecond; + } + else if (value % TimeSpan.TicksPerMillisecond == 0) + { + scale = TimeSpanScale.Milliseconds; + value /= TimeSpan.TicksPerMillisecond; + } + else + { + scale = TimeSpanScale.Ticks; + } + + SubItemToken token = ProtoWriter.StartSubItem(null, dest); + + if (value != 0) + { + ProtoWriter.WriteFieldHeader(FieldTimeSpanValue, WireType.SignedVariant, dest); + ProtoWriter.WriteInt64(value, dest); + } + if (scale != TimeSpanScale.Days) + { + ProtoWriter.WriteFieldHeader(FieldTimeSpanScale, WireType.Variant, dest); + ProtoWriter.WriteInt32((int)scale, dest); + } + if (kind != DateTimeKind.Unspecified) + { + ProtoWriter.WriteFieldHeader(FieldTimeSpanKind, WireType.Variant, dest); + ProtoWriter.WriteInt32((int)kind, dest); + } + ProtoWriter.EndSubItem(token, dest); + break; + case WireType.Fixed64: + ProtoWriter.WriteInt64(timeSpan.Ticks, dest); + break; + default: + throw new ProtoException("Unexpected wire-type: " + dest.WireType.ToString()); + } + } + + /// + /// Parses a TimeSpan from a protobuf stream using protobuf-net's own representation, bcl.TimeSpan + /// + public static TimeSpan ReadTimeSpan(ProtoReader source) + { + long ticks = ReadTimeSpanTicks(source, out DateTimeKind kind); + if (ticks == long.MinValue) return TimeSpan.MinValue; + if (ticks == long.MaxValue) return TimeSpan.MaxValue; + return TimeSpan.FromTicks(ticks); + } + + /// + /// Parses a TimeSpan from a protobuf stream using the standardized format, google.protobuf.Duration + /// + public static TimeSpan ReadDuration(ProtoReader source) + { + long seconds = 0; + int nanos = 0; + SubItemToken token = ProtoReader.StartSubItem(source); + int fieldNumber; + while ((fieldNumber = source.ReadFieldHeader()) > 0) + { + switch (fieldNumber) + { + case 1: + seconds = source.ReadInt64(); + break; + case 2: + nanos = source.ReadInt32(); + break; + default: + source.SkipField(); + break; + } + } + ProtoReader.EndSubItem(token, source); + return FromDurationSeconds(seconds, nanos); + } + + /// + /// Writes a TimeSpan to a protobuf stream using the standardized format, google.protobuf.Duration + /// + public static void WriteDuration(TimeSpan value, ProtoWriter dest) + { + var seconds = ToDurationSeconds(value, out int nanos); + WriteSecondsNanos(seconds, nanos, dest); + } + + private static void WriteSecondsNanos(long seconds, int nanos, ProtoWriter dest) + { + SubItemToken token = ProtoWriter.StartSubItem(null, dest); + if (seconds != 0) + { + ProtoWriter.WriteFieldHeader(1, WireType.Variant, dest); + ProtoWriter.WriteInt64(seconds, dest); + } + if (nanos != 0) + { + ProtoWriter.WriteFieldHeader(2, WireType.Variant, dest); + ProtoWriter.WriteInt32(nanos, dest); + } + ProtoWriter.EndSubItem(token, dest); + } + + /// + /// Parses a DateTime from a protobuf stream using the standardized format, google.protobuf.Timestamp + /// + public static DateTime 
ReadTimestamp(ProtoReader source) + { + // note: DateTime is only defined for just over 0000 to just below 10000; + // TimeSpan has a range of +/- 10,675,199 days === 29k years; + // so we can just use epoch time delta + return TimestampEpoch + ReadDuration(source); + } + + /// + /// Writes a DateTime to a protobuf stream using the standardized format, google.protobuf.Timestamp + /// + public static void WriteTimestamp(DateTime value, ProtoWriter dest) + { + var seconds = ToDurationSeconds(value - TimestampEpoch, out int nanos); + + if (nanos < 0) + { // from Timestamp.proto: + // "Negative second values with fractions must still have + // non -negative nanos values that count forward in time." + seconds--; + nanos += 1000000000; + } + WriteSecondsNanos(seconds, nanos, dest); + } + + static TimeSpan FromDurationSeconds(long seconds, int nanos) + { + + long ticks = checked((seconds * TimeSpan.TicksPerSecond) + + (nanos * TimeSpan.TicksPerMillisecond) / 1000000); + return TimeSpan.FromTicks(ticks); + } + + static long ToDurationSeconds(TimeSpan value, out int nanos) + { + nanos = (int)(((value.Ticks % TimeSpan.TicksPerSecond) * 1000000) + / TimeSpan.TicksPerMillisecond); + return value.Ticks / TimeSpan.TicksPerSecond; + } + + /// + /// Parses a DateTime from a protobuf stream + /// + public static DateTime ReadDateTime(ProtoReader source) + { + long ticks = ReadTimeSpanTicks(source, out DateTimeKind kind); + if (ticks == long.MinValue) return DateTime.MinValue; + if (ticks == long.MaxValue) return DateTime.MaxValue; + return EpochOrigin[(int)kind].AddTicks(ticks); + } + + /// + /// Writes a DateTime to a protobuf stream, excluding the Kind + /// + public static void WriteDateTime(DateTime value, ProtoWriter dest) + { + WriteDateTimeImpl(value, dest, false); + } + + /// + /// Writes a DateTime to a protobuf stream, including the Kind + /// + public static void WriteDateTimeWithKind(DateTime value, ProtoWriter dest) + { + WriteDateTimeImpl(value, dest, true); + } + + private static void WriteDateTimeImpl(DateTime value, ProtoWriter dest, bool includeKind) + { + if (dest == null) throw new ArgumentNullException(nameof(dest)); + TimeSpan delta; + switch (dest.WireType) + { + case WireType.StartGroup: + case WireType.String: + if (value == DateTime.MaxValue) + { + delta = TimeSpan.MaxValue; + includeKind = false; + } + else if (value == DateTime.MinValue) + { + delta = TimeSpan.MinValue; + includeKind = false; + } + else + { + delta = value - EpochOrigin[0]; + } + break; + default: + delta = value - EpochOrigin[0]; + break; + } + WriteTimeSpanImpl(delta, dest, includeKind ? 
value.Kind : DateTimeKind.Unspecified); + } + + private static long ReadTimeSpanTicks(ProtoReader source, out DateTimeKind kind) + { + kind = DateTimeKind.Unspecified; + switch (source.WireType) + { + case WireType.String: + case WireType.StartGroup: + SubItemToken token = ProtoReader.StartSubItem(source); + int fieldNumber; + TimeSpanScale scale = TimeSpanScale.Days; + long value = 0; + while ((fieldNumber = source.ReadFieldHeader()) > 0) + { + switch (fieldNumber) + { + case FieldTimeSpanScale: + scale = (TimeSpanScale)source.ReadInt32(); + break; + case FieldTimeSpanValue: + source.Assert(WireType.SignedVariant); + value = source.ReadInt64(); + break; + case FieldTimeSpanKind: + kind = (DateTimeKind)source.ReadInt32(); + switch (kind) + { + case DateTimeKind.Unspecified: + case DateTimeKind.Utc: + case DateTimeKind.Local: + break; // fine + default: + throw new ProtoException("Invalid date/time kind: " + kind.ToString()); + } + break; + default: + source.SkipField(); + break; + } + } + ProtoReader.EndSubItem(token, source); + switch (scale) + { + case TimeSpanScale.Days: + return value * TimeSpan.TicksPerDay; + case TimeSpanScale.Hours: + return value * TimeSpan.TicksPerHour; + case TimeSpanScale.Minutes: + return value * TimeSpan.TicksPerMinute; + case TimeSpanScale.Seconds: + return value * TimeSpan.TicksPerSecond; + case TimeSpanScale.Milliseconds: + return value * TimeSpan.TicksPerMillisecond; + case TimeSpanScale.Ticks: + return value; + case TimeSpanScale.MinMax: + switch (value) + { + case 1: return long.MaxValue; + case -1: return long.MinValue; + default: throw new ProtoException("Unknown min/max value: " + value.ToString()); + } + default: + throw new ProtoException("Unknown timescale: " + scale.ToString()); + } + case WireType.Fixed64: + return source.ReadInt64(); + default: + throw new ProtoException("Unexpected wire-type: " + source.WireType.ToString()); + } + } + + const int FieldDecimalLow = 0x01, FieldDecimalHigh = 0x02, FieldDecimalSignScale = 0x03; + + /// + /// Parses a decimal from a protobuf stream + /// + public static decimal ReadDecimal(ProtoReader reader) + { + ulong low = 0; + uint high = 0; + uint signScale = 0; + + int fieldNumber; + SubItemToken token = ProtoReader.StartSubItem(reader); + while ((fieldNumber = reader.ReadFieldHeader()) > 0) + { + switch (fieldNumber) + { + case FieldDecimalLow: low = reader.ReadUInt64(); break; + case FieldDecimalHigh: high = reader.ReadUInt32(); break; + case FieldDecimalSignScale: signScale = reader.ReadUInt32(); break; + default: reader.SkipField(); break; + } + + } + ProtoReader.EndSubItem(token, reader); + + int lo = (int)(low & 0xFFFFFFFFL), + mid = (int)((low >> 32) & 0xFFFFFFFFL), + hi = (int)high; + bool isNeg = (signScale & 0x0001) == 0x0001; + byte scale = (byte)((signScale & 0x01FE) >> 1); + return new decimal(lo, mid, hi, isNeg, scale); + } + + /// + /// Writes a decimal to a protobuf stream + /// + public static void WriteDecimal(decimal value, ProtoWriter writer) + { + int[] bits = decimal.GetBits(value); + ulong a = ((ulong)bits[1]) << 32, b = ((ulong)bits[0]) & 0xFFFFFFFFL; + ulong low = a | b; + uint high = (uint)bits[2]; + uint signScale = (uint)(((bits[3] >> 15) & 0x01FE) | ((bits[3] >> 31) & 0x0001)); + + SubItemToken token = ProtoWriter.StartSubItem(null, writer); + if (low != 0) + { + ProtoWriter.WriteFieldHeader(FieldDecimalLow, WireType.Variant, writer); + ProtoWriter.WriteUInt64(low, writer); + } + if (high != 0) + { + ProtoWriter.WriteFieldHeader(FieldDecimalHigh, WireType.Variant, writer); + 
ProtoWriter.WriteUInt32(high, writer); + } + if (signScale != 0) + { + ProtoWriter.WriteFieldHeader(FieldDecimalSignScale, WireType.Variant, writer); + ProtoWriter.WriteUInt32(signScale, writer); + } + ProtoWriter.EndSubItem(token, writer); + } + + const int FieldGuidLow = 1, FieldGuidHigh = 2; + /// + /// Writes a Guid to a protobuf stream + /// + public static void WriteGuid(Guid value, ProtoWriter dest) + { + byte[] blob = value.ToByteArray(); + + SubItemToken token = ProtoWriter.StartSubItem(null, dest); + if (value != Guid.Empty) + { + ProtoWriter.WriteFieldHeader(FieldGuidLow, WireType.Fixed64, dest); + ProtoWriter.WriteBytes(blob, 0, 8, dest); + ProtoWriter.WriteFieldHeader(FieldGuidHigh, WireType.Fixed64, dest); + ProtoWriter.WriteBytes(blob, 8, 8, dest); + } + ProtoWriter.EndSubItem(token, dest); + } + /// + /// Parses a Guid from a protobuf stream + /// + public static Guid ReadGuid(ProtoReader source) + { + ulong low = 0, high = 0; + int fieldNumber; + SubItemToken token = ProtoReader.StartSubItem(source); + while ((fieldNumber = source.ReadFieldHeader()) > 0) + { + switch (fieldNumber) + { + case FieldGuidLow: low = source.ReadUInt64(); break; + case FieldGuidHigh: high = source.ReadUInt64(); break; + default: source.SkipField(); break; + } + } + ProtoReader.EndSubItem(token, source); + if (low == 0 && high == 0) return Guid.Empty; + uint a = (uint)(low >> 32), b = (uint)low, c = (uint)(high >> 32), d = (uint)high; + return new Guid((int)b, (short)a, (short)(a >> 16), + (byte)d, (byte)(d >> 8), (byte)(d >> 16), (byte)(d >> 24), + (byte)c, (byte)(c >> 8), (byte)(c >> 16), (byte)(c >> 24)); + + } + + + private const int + FieldExistingObjectKey = 1, + FieldNewObjectKey = 2, + FieldExistingTypeKey = 3, + FieldNewTypeKey = 4, + FieldTypeName = 8, + FieldObject = 10; + + /// + /// Optional behaviours that introduce .NET-specific functionality + /// + [Flags] + public enum NetObjectOptions : byte + { + /// + /// No special behaviour + /// + None = 0, + /// + /// Enables full object-tracking/full-graph support. + /// + AsReference = 1, + /// + /// Embeds the type information into the stream, allowing usage with types not known in advance. + /// + DynamicType = 2, + /// + /// If false, the constructor for the type is bypassed during deserialization, meaning any field initializers + /// or other initialization code is skipped. + /// + UseConstructor = 4, + /// + /// Should the object index be reserved, rather than creating an object promptly + /// + LateSet = 8 + } + + /// + /// Reads an *implementation specific* bundled .NET object, including (as options) type-metadata, identity/re-use, etc. 
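(Aside, not part of this changeset: the WriteDecimal/ReadDecimal helpers above pack a decimal into the three bcl.proto fields low, high and signScale. The following minimal C# sketch, with purely illustrative names, shows how that packing falls out of decimal.GetBits.)

using System;

static class DecimalLayoutDemo
{
    static void Main()
    {
        decimal value = -123.45m;
        int[] bits = decimal.GetBits(value);

        // 96-bit magnitude: low 64 bits come from bits[1]/bits[0], top 32 bits from bits[2]
        ulong low = ((ulong)(uint)bits[1] << 32) | (uint)bits[0];
        uint high = (uint)bits[2];

        // same packing as WriteDecimal above: scale (0-28) in bits 1..8, sign in bit 0
        uint signScale = (uint)(((bits[3] >> 15) & 0x01FE) | ((bits[3] >> 31) & 0x0001));

        Console.WriteLine($"low={low} high={high} signScale={signScale}");
        // -123.45m => low=12345, high=0, signScale=(2 << 1) | 1 = 5
    }
}
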
+ /// + public static object ReadNetObject(object value, ProtoReader source, int key, Type type, NetObjectOptions options) + { + SubItemToken token = ProtoReader.StartSubItem(source); + int fieldNumber; + int newObjectKey = -1, newTypeKey = -1, tmp; + while ((fieldNumber = source.ReadFieldHeader()) > 0) + { + switch (fieldNumber) + { + case FieldExistingObjectKey: + tmp = source.ReadInt32(); + value = source.NetCache.GetKeyedObject(tmp); + break; + case FieldNewObjectKey: + newObjectKey = source.ReadInt32(); + break; + case FieldExistingTypeKey: + tmp = source.ReadInt32(); + type = (Type)source.NetCache.GetKeyedObject(tmp); + key = source.GetTypeKey(ref type); + break; + case FieldNewTypeKey: + newTypeKey = source.ReadInt32(); + break; + case FieldTypeName: + string typeName = source.ReadString(); + type = source.DeserializeType(typeName); + if (type == null) + { + throw new ProtoException("Unable to resolve type: " + typeName + " (you can use the TypeModel.DynamicTypeFormatting event to provide a custom mapping)"); + } + if (type == typeof(string)) + { + key = -1; + } + else + { + key = source.GetTypeKey(ref type); + if (key < 0) + throw new InvalidOperationException("Dynamic type is not a contract-type: " + type.Name); + } + break; + case FieldObject: + bool isString = type == typeof(string); + bool wasNull = value == null; + bool lateSet = wasNull && (isString || ((options & NetObjectOptions.LateSet) != 0)); + + if (newObjectKey >= 0 && !lateSet) + { + if (value == null) + { + source.TrapNextObject(newObjectKey); + } + else + { + source.NetCache.SetKeyedObject(newObjectKey, value); + } + if (newTypeKey >= 0) source.NetCache.SetKeyedObject(newTypeKey, type); + } + object oldValue = value; + if (isString) + { + value = source.ReadString(); + } + else + { + value = ProtoReader.ReadTypedObject(oldValue, key, source, type); + } + + if (newObjectKey >= 0) + { + if (wasNull && !lateSet) + { // this both ensures (via exception) that it *was* set, and makes sure we don't shout + // about changed references + oldValue = source.NetCache.GetKeyedObject(newObjectKey); + } + if (lateSet) + { + source.NetCache.SetKeyedObject(newObjectKey, value); + if (newTypeKey >= 0) source.NetCache.SetKeyedObject(newTypeKey, type); + } + } + if (newObjectKey >= 0 && !lateSet && !ReferenceEquals(oldValue, value)) + { + throw new ProtoException("A reference-tracked object changed reference during deserialization"); + } + if (newObjectKey < 0 && newTypeKey >= 0) + { // have a new type, but not a new object + source.NetCache.SetKeyedObject(newTypeKey, type); + } + break; + default: + source.SkipField(); + break; + } + } + if (newObjectKey >= 0 && (options & NetObjectOptions.AsReference) == 0) + { + throw new ProtoException("Object key in input stream, but reference-tracking was not expected"); + } + ProtoReader.EndSubItem(token, source); + + return value; + } + + /// + /// Writes an *implementation specific* bundled .NET object, including (as options) type-metadata, identity/re-use, etc. 
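(Aside, not part of this changeset: ReadNetObject/WriteNetObject implement the wrapper message that protobuf-net v2 uses for reference-tracked and dynamically-typed members. Assuming this vendored copy keeps the stock v2 attribute surface, a member typically opts into that wrapper as sketched below; the Node type is hypothetical.)

using ProtoBuf;

[ProtoContract]
class Node
{
    [ProtoMember(1)]
    public string Name { get; set; }

    // AsReference = true makes the serializer emit the NetObject wrapper handled by
    // ReadNetObject/WriteNetObject, so a shared or cyclic Next instance is written once
    // and restored as the same reference on deserialization.
    [ProtoMember(2, AsReference = true)]
    public Node Next { get; set; }
}
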
+ /// + public static void WriteNetObject(object value, ProtoWriter dest, int key, NetObjectOptions options) + { + if (dest == null) throw new ArgumentNullException("dest"); + bool dynamicType = (options & NetObjectOptions.DynamicType) != 0, + asReference = (options & NetObjectOptions.AsReference) != 0; + WireType wireType = dest.WireType; + SubItemToken token = ProtoWriter.StartSubItem(null, dest); + bool writeObject = true; + if (asReference) + { + int objectKey = dest.NetCache.AddObjectKey(value, out bool existing); + ProtoWriter.WriteFieldHeader(existing ? FieldExistingObjectKey : FieldNewObjectKey, WireType.Variant, dest); + ProtoWriter.WriteInt32(objectKey, dest); + if (existing) + { + writeObject = false; + } + } + + if (writeObject) + { + if (dynamicType) + { + Type type = value.GetType(); + + if (!(value is string)) + { + key = dest.GetTypeKey(ref type); + if (key < 0) throw new InvalidOperationException("Dynamic type is not a contract-type: " + type.Name); + } + int typeKey = dest.NetCache.AddObjectKey(type, out bool existing); + ProtoWriter.WriteFieldHeader(existing ? FieldExistingTypeKey : FieldNewTypeKey, WireType.Variant, dest); + ProtoWriter.WriteInt32(typeKey, dest); + if (!existing) + { + ProtoWriter.WriteFieldHeader(FieldTypeName, WireType.String, dest); + ProtoWriter.WriteString(dest.SerializeType(type), dest); + } + + } + ProtoWriter.WriteFieldHeader(FieldObject, wireType, dest); + if (value is string) + { + ProtoWriter.WriteString((string)value, dest); + } + else + { + ProtoWriter.WriteObject(value, key, dest); + } + } + ProtoWriter.EndSubItem(token, dest); + } + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/BclHelpers.cs.meta b/Runtime/Protobuf-net/BclHelpers.cs.meta new file mode 100644 index 0000000..da3a37a --- /dev/null +++ b/Runtime/Protobuf-net/BclHelpers.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 5072fbed211eb9f43a3cd2805dd75ef7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/BufferExtension.cs b/Runtime/Protobuf-net/BufferExtension.cs new file mode 100644 index 0000000..ea428dd --- /dev/null +++ b/Runtime/Protobuf-net/BufferExtension.cs @@ -0,0 +1,78 @@ +using System; +using System.IO; + +namespace ProtoBuf +{ + /// + /// Provides a simple buffer-based implementation of an extension object. + /// + public sealed class BufferExtension : IExtension, IExtensionResettable + { + private byte[] buffer; + + void IExtensionResettable.Reset() + { + buffer = null; + } + + int IExtension.GetLength() + { + return buffer == null ? 
0 : buffer.Length; + } + + Stream IExtension.BeginAppend() + { + return new MemoryStream(); + } + + void IExtension.EndAppend(Stream stream, bool commit) + { + using (stream) + { + int len; + if (commit && (len = (int)stream.Length) > 0) + { + MemoryStream ms = (MemoryStream)stream; + + if (buffer == null) + { // allocate new buffer + buffer = ms.ToArray(); + } + else + { // resize and copy the data + // note: Array.Resize not available on CF + int offset = buffer.Length; + byte[] tmp = new byte[offset + len]; + Buffer.BlockCopy(buffer, 0, tmp, 0, offset); + +#if PORTABLE // no GetBuffer() - fine, we'll use Read instead + int bytesRead; + long oldPos = ms.Position; + ms.Position = 0; + while (len > 0 && (bytesRead = ms.Read(tmp, offset, len)) > 0) + { + len -= bytesRead; + offset += bytesRead; + } + if(len != 0) throw new EndOfStreamException(); + ms.Position = oldPos; +#else + Buffer.BlockCopy(Helpers.GetBuffer(ms), 0, tmp, offset, len); +#endif + buffer = tmp; + } + } + } + } + + Stream IExtension.BeginQuery() + { + return buffer == null ? Stream.Null : new MemoryStream(buffer); + } + + void IExtension.EndQuery(Stream stream) + { + using (stream) { } // just clean up + } + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/BufferExtension.cs.meta b/Runtime/Protobuf-net/BufferExtension.cs.meta new file mode 100644 index 0000000..4a39591 --- /dev/null +++ b/Runtime/Protobuf-net/BufferExtension.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a9cf66041a027e94892d5014c2b905b3 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/BufferPool.cs b/Runtime/Protobuf-net/BufferPool.cs new file mode 100644 index 0000000..8ad3d1a --- /dev/null +++ b/Runtime/Protobuf-net/BufferPool.cs @@ -0,0 +1,149 @@ +using System; + +namespace ProtoBuf +{ + internal sealed class BufferPool + { + internal static void Flush() + { + lock (Pool) + { + for (var i = 0; i < Pool.Length; i++) + Pool[i] = null; + } + } + + private BufferPool() { } + private const int POOL_SIZE = 20; + internal const int BUFFER_LENGTH = 1024; + private static readonly CachedBuffer[] Pool = new CachedBuffer[POOL_SIZE]; + + internal static byte[] GetBuffer() => GetBuffer(BUFFER_LENGTH); + + internal static byte[] GetBuffer(int minSize) + { + byte[] cachedBuff = GetCachedBuffer(minSize); + return cachedBuff ?? 
new byte[minSize]; + } + + internal static byte[] GetCachedBuffer(int minSize) + { + lock (Pool) + { + var bestIndex = -1; + byte[] bestMatch = null; + for (var i = 0; i < Pool.Length; i++) + { + var buffer = Pool[i]; + if (buffer == null || buffer.Size < minSize) + { + continue; + } + if (bestMatch != null && bestMatch.Length < buffer.Size) + { + continue; + } + + var tmp = buffer.Buffer; + if (tmp == null) + { + Pool[i] = null; + } + else + { + bestMatch = tmp; + bestIndex = i; + } + } + + if (bestIndex >= 0) + { + Pool[bestIndex] = null; + } + + return bestMatch; + } + } + + /// + /// https://docs.microsoft.com/en-us/dotnet/framework/configure-apps/file-schema/runtime/gcallowverylargeobjects-element + /// + private const int MaxByteArraySize = int.MaxValue - 56; + + internal static void ResizeAndFlushLeft(ref byte[] buffer, int toFitAtLeastBytes, int copyFromIndex, int copyBytes) + { + Helpers.DebugAssert(buffer != null); + Helpers.DebugAssert(toFitAtLeastBytes > buffer.Length); + Helpers.DebugAssert(copyFromIndex >= 0); + Helpers.DebugAssert(copyBytes >= 0); + + int newLength = buffer.Length * 2; + if (newLength < 0) + { + newLength = MaxByteArraySize; + } + + if (newLength < toFitAtLeastBytes) newLength = toFitAtLeastBytes; + + if (copyBytes == 0) + { + ReleaseBufferToPool(ref buffer); + } + + var newBuffer = GetCachedBuffer(toFitAtLeastBytes) ?? new byte[newLength]; + + if (copyBytes > 0) + { + Buffer.BlockCopy(buffer, copyFromIndex, newBuffer, 0, copyBytes); + ReleaseBufferToPool(ref buffer); + } + + buffer = newBuffer; + } + + internal static void ReleaseBufferToPool(ref byte[] buffer) + { + if (buffer == null) return; + + lock (Pool) + { + var minIndex = 0; + var minSize = int.MaxValue; + for (var i = 0; i < Pool.Length; i++) + { + var tmp = Pool[i]; + if (tmp == null || !tmp.IsAlive) + { + minIndex = 0; + break; + } + if (tmp.Size < minSize) + { + minIndex = i; + minSize = tmp.Size; + } + } + + Pool[minIndex] = new CachedBuffer(buffer); + } + + buffer = null; + } + + private class CachedBuffer + { + private readonly WeakReference _reference; + + public int Size { get; } + + public bool IsAlive => _reference.IsAlive; + public byte[] Buffer => (byte[])_reference.Target; + + public CachedBuffer(byte[] buffer) + { + Size = buffer.Length; + _reference = new WeakReference(buffer); + } + } + } +} diff --git a/Runtime/Protobuf-net/BufferPool.cs.meta b/Runtime/Protobuf-net/BufferPool.cs.meta new file mode 100644 index 0000000..2870b8c --- /dev/null +++ b/Runtime/Protobuf-net/BufferPool.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 423b228ed060b91458bc6d4e6aa0f570 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/CallbackAttributes.cs b/Runtime/Protobuf-net/CallbackAttributes.cs new file mode 100644 index 0000000..1adb8e5 --- /dev/null +++ b/Runtime/Protobuf-net/CallbackAttributes.cs @@ -0,0 +1,33 @@ +using System; +using System.ComponentModel; + +namespace ProtoBuf +{ + /// Specifies a method on the root-contract in an hierarchy to be invoked before serialization. + [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = false)] +#if !CF && !PORTABLE && !COREFX && !PROFILE259 + [ImmutableObject(true)] +#endif + public sealed class ProtoBeforeSerializationAttribute : Attribute { } + + /// Specifies a method on the root-contract in an hierarchy to be invoked after serialization. 
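+ /// <example>
+ /// A minimal sketch of how this callback attribute is applied (the contract type and member names below are
+ /// illustrative only, not part of this library):
+ /// <code>
+ /// [ProtoContract]
+ /// public class Order
+ /// {
+ ///     [ProtoMember(1)] public int Id;
+ ///     [ProtoAfterSerialization] private void OnSerialized() { /* e.g. release temporary state */ }
+ /// }
+ /// </code>
+ /// </example>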
+ [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = false)] +#if !CF && !PORTABLE && !COREFX && !PROFILE259 + [ImmutableObject(true)] +#endif + public sealed class ProtoAfterSerializationAttribute : Attribute { } + + /// Specifies a method on the root-contract in an hierarchy to be invoked before deserialization. + [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = false)] +#if !CF && !PORTABLE && !COREFX && !PROFILE259 + [ImmutableObject(true)] +#endif + public sealed class ProtoBeforeDeserializationAttribute : Attribute { } + + /// Specifies a method on the root-contract in an hierarchy to be invoked after deserialization. + [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = false)] +#if !CF && !PORTABLE && !COREFX && !PROFILE259 + [ImmutableObject(true)] +#endif + public sealed class ProtoAfterDeserializationAttribute : Attribute { } +} diff --git a/Runtime/Protobuf-net/CallbackAttributes.cs.meta b/Runtime/Protobuf-net/CallbackAttributes.cs.meta new file mode 100644 index 0000000..7cf81a4 --- /dev/null +++ b/Runtime/Protobuf-net/CallbackAttributes.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 53de2cb3784c9dd43aa6f30d7df072a4 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Compiler.meta b/Runtime/Protobuf-net/Compiler.meta new file mode 100644 index 0000000..9de78a6 --- /dev/null +++ b/Runtime/Protobuf-net/Compiler.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 2cdd9eb2afa3ed24480a6035f507aad4 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Compiler/CompilerContext.cs b/Runtime/Protobuf-net/Compiler/CompilerContext.cs new file mode 100644 index 0000000..6100200 --- /dev/null +++ b/Runtime/Protobuf-net/Compiler/CompilerContext.cs @@ -0,0 +1,1435 @@ +#if FEAT_COMPILER +//#define DEBUG_COMPILE +using System; +using System.Threading; +using ProtoBuf.Meta; +using ProtoBuf.Serializers; +using System.Reflection; +using System.Reflection.Emit; + +namespace ProtoBuf.Compiler +{ + internal readonly struct CodeLabel + { + public readonly Label Value; + public readonly int Index; + public CodeLabel(Label value, int index) + { + this.Value = value; + this.Index = index; + } + } + internal sealed class CompilerContext + { + public TypeModel Model => model; + + readonly DynamicMethod method; + static int next; + + internal CodeLabel DefineLabel() + { + CodeLabel result = new CodeLabel(il.DefineLabel(), nextLabel++); + return result; + } +#if DEBUG_COMPILE + static readonly string traceCompilePath; + static CompilerContext() + { + traceCompilePath = System.IO.Path.Combine(System.IO.Directory.GetCurrentDirectory(), + "TraceCompile.txt"); + Console.WriteLine("DEBUG_COMPILE enabled; writing to " + traceCompilePath); + } +#endif + [System.Diagnostics.Conditional("DEBUG_COMPILE")] + private void TraceCompile(string value) + { +#if DEBUG_COMPILE + if (!string.IsNullOrWhiteSpace(value)) + { + using (System.IO.StreamWriter sw = System.IO.File.AppendText(traceCompilePath)) + { + sw.WriteLine(value); + } + } +#endif + } + internal void MarkLabel(CodeLabel label) + { + il.MarkLabel(label.Value); + TraceCompile("#: " + label.Index); + } + + public static ProtoSerializer BuildSerializer(IProtoSerializer head, TypeModel model) + { + Type type = head.ExpectedType; + try + { 
+ CompilerContext ctx = new CompilerContext(type, true, true, model, typeof(object)); + ctx.LoadValue(ctx.InputValue); + ctx.CastFromObject(type); + ctx.WriteNullCheckedTail(type, head, null); + ctx.Emit(OpCodes.Ret); + return (ProtoSerializer)ctx.method.CreateDelegate( + typeof(ProtoSerializer)); + } + catch (Exception ex) + { + string name = type.FullName; + if (string.IsNullOrEmpty(name)) name = type.Name; + throw new InvalidOperationException("It was not possible to prepare a serializer for: " + name, ex); + } + } + /*public static ProtoCallback BuildCallback(IProtoTypeSerializer head) + { + Type type = head.ExpectedType; + CompilerContext ctx = new CompilerContext(type, true, true); + using (Local typedVal = new Local(ctx, type)) + { + ctx.LoadValue(Local.InputValue); + ctx.CastFromObject(type); + ctx.StoreValue(typedVal); + CodeLabel[] jumpTable = new CodeLabel[4]; + for(int i = 0 ; i < jumpTable.Length ; i++) { + jumpTable[i] = ctx.DefineLabel(); + } + ctx.LoadReaderWriter(); + ctx.Switch(jumpTable); + ctx.Return(); + for(int i = 0 ; i < jumpTable.Length ; i++) { + ctx.MarkLabel(jumpTable[i]); + if (head.HasCallbacks((TypeModel.CallbackType)i)) + { + head.EmitCallback(ctx, typedVal, (TypeModel.CallbackType)i); + } + ctx.Return(); + } + } + + ctx.Emit(OpCodes.Ret); + return (ProtoCallback)ctx.method.CreateDelegate( + typeof(ProtoCallback)); + }*/ + public static ProtoDeserializer BuildDeserializer(IProtoSerializer head, TypeModel model) + { + Type type = head.ExpectedType; + CompilerContext ctx = new CompilerContext(type, false, true, model, typeof(object)); + + using (Local typedVal = new Local(ctx, type)) + { + if (!Helpers.IsValueType(type)) + { + ctx.LoadValue(ctx.InputValue); + ctx.CastFromObject(type); + ctx.StoreValue(typedVal); + } + else + { + ctx.LoadValue(ctx.InputValue); + CodeLabel notNull = ctx.DefineLabel(), endNull = ctx.DefineLabel(); + ctx.BranchIfTrue(notNull, true); + + ctx.LoadAddress(typedVal, type); + ctx.EmitCtor(type); + ctx.Branch(endNull, true); + + ctx.MarkLabel(notNull); + ctx.LoadValue(ctx.InputValue); + ctx.CastFromObject(type); + ctx.StoreValue(typedVal); + + ctx.MarkLabel(endNull); + } + head.EmitRead(ctx, typedVal); + + if (head.ReturnsValue) + { + ctx.StoreValue(typedVal); + } + + ctx.LoadValue(typedVal); + ctx.CastToObject(type); + } + ctx.Emit(OpCodes.Ret); + return (ProtoDeserializer)ctx.method.CreateDelegate( + typeof(ProtoDeserializer)); + } + + internal void Return() + { + Emit(OpCodes.Ret); + } + + static bool IsObject(Type type) + { + return type == typeof(object); + } + + internal void CastToObject(Type type) + { + if (IsObject(type)) + { } + else if (Helpers.IsValueType(type)) + { + il.Emit(OpCodes.Box, type); + TraceCompile(OpCodes.Box + ": " + type); + } + else + { + il.Emit(OpCodes.Castclass, MapType(typeof(object))); + TraceCompile(OpCodes.Castclass + ": " + type); + } + } + + internal void CastFromObject(Type type) + { + if (IsObject(type)) + { } + else if (Helpers.IsValueType(type)) + { + switch (MetadataVersion) + { + case ILVersion.Net1: + il.Emit(OpCodes.Unbox, type); + il.Emit(OpCodes.Ldobj, type); + TraceCompile(OpCodes.Unbox + ": " + type); + TraceCompile(OpCodes.Ldobj + ": " + type); + break; + default: + + il.Emit(OpCodes.Unbox_Any, type); + TraceCompile(OpCodes.Unbox_Any + ": " + type); + break; + } + } + else + { + il.Emit(OpCodes.Castclass, type); + TraceCompile(OpCodes.Castclass + ": " + type); + } + } + private readonly bool isStatic; + private readonly RuntimeTypeModel.SerializerPair[] methodPairs; + + internal 
MethodBuilder GetDedicatedMethod(int metaKey, bool read) + { + if (methodPairs == null) return null; + // but if we *do* have pairs, we demand that we find a match... + for (int i = 0; i < methodPairs.Length; i++) + { + if (methodPairs[i].MetaKey == metaKey) { return read ? methodPairs[i].Deserialize : methodPairs[i].Serialize; } + } + throw new ArgumentException("Meta-key not found", "metaKey"); + } + + internal int MapMetaKeyToCompiledKey(int metaKey) + { + if (metaKey < 0 || methodPairs == null) return metaKey; // all meta, or a dummy/wildcard key + + for (int i = 0; i < methodPairs.Length; i++) + { + if (methodPairs[i].MetaKey == metaKey) return i; + } + throw new ArgumentException("Key could not be mapped: " + metaKey.ToString(), "metaKey"); + } + + + private readonly bool isWriter; + + private readonly bool nonPublic; + internal bool NonPublic { get { return nonPublic; } } + + private readonly Local inputValue; + public Local InputValue { get { return inputValue; } } + + private readonly string assemblyName; + internal CompilerContext(ILGenerator il, bool isStatic, bool isWriter, RuntimeTypeModel.SerializerPair[] methodPairs, TypeModel model, ILVersion metadataVersion, string assemblyName, Type inputType, string traceName) + { + if (string.IsNullOrEmpty(assemblyName)) throw new ArgumentNullException(nameof(assemblyName)); + this.assemblyName = assemblyName; + this.isStatic = isStatic; + this.methodPairs = methodPairs ?? throw new ArgumentNullException(nameof(methodPairs)); + this.il = il ?? throw new ArgumentNullException(nameof(il)); + // nonPublic = false; <== implicit + this.isWriter = isWriter; + this.model = model ?? throw new ArgumentNullException(nameof(model)); + this.metadataVersion = metadataVersion; + if (inputType != null) this.inputValue = new Local(null, inputType); + TraceCompile(">> " + traceName); + } + + private CompilerContext(Type associatedType, bool isWriter, bool isStatic, TypeModel model, Type inputType) + { + metadataVersion = ILVersion.Net2; + this.isStatic = isStatic; + this.isWriter = isWriter; + this.model = model ?? throw new ArgumentNullException(nameof(model)); + nonPublic = true; + Type[] paramTypes; + Type returnType; + if (isWriter) + { + returnType = typeof(void); + paramTypes = new Type[] { typeof(object), typeof(ProtoWriter) }; + } + else + { + returnType = typeof(object); + paramTypes = new Type[] { typeof(object), typeof(ProtoReader) }; + } + int uniqueIdentifier; +#if PLAT_NO_INTERLOCKED + uniqueIdentifier = ++next; +#else + uniqueIdentifier = Interlocked.Increment(ref next); +#endif + method = new DynamicMethod("proto_" + uniqueIdentifier.ToString(), returnType, paramTypes, associatedType +#if COREFX + .GetTypeInfo() +#endif + .IsInterface ? 
typeof(object) : associatedType, true); + this.il = method.GetILGenerator(); + if (inputType != null) this.inputValue = new Local(null, inputType); + TraceCompile(">> " + method.Name); + } + + private readonly ILGenerator il; + + private void Emit(OpCode opcode) + { + il.Emit(opcode); + TraceCompile(opcode.ToString()); + } + + public void LoadValue(string value) + { + if (value == null) + { + LoadNullRef(); + } + else + { + il.Emit(OpCodes.Ldstr, value); + TraceCompile(OpCodes.Ldstr + ": " + value); + } + } + + public void LoadValue(float value) + { + il.Emit(OpCodes.Ldc_R4, value); + TraceCompile(OpCodes.Ldc_R4 + ": " + value); + } + + public void LoadValue(double value) + { + il.Emit(OpCodes.Ldc_R8, value); + TraceCompile(OpCodes.Ldc_R8 + ": " + value); + } + + public void LoadValue(long value) + { + il.Emit(OpCodes.Ldc_I8, value); + TraceCompile(OpCodes.Ldc_I8 + ": " + value); + } + + public void LoadValue(int value) + { + switch (value) + { + case 0: Emit(OpCodes.Ldc_I4_0); break; + case 1: Emit(OpCodes.Ldc_I4_1); break; + case 2: Emit(OpCodes.Ldc_I4_2); break; + case 3: Emit(OpCodes.Ldc_I4_3); break; + case 4: Emit(OpCodes.Ldc_I4_4); break; + case 5: Emit(OpCodes.Ldc_I4_5); break; + case 6: Emit(OpCodes.Ldc_I4_6); break; + case 7: Emit(OpCodes.Ldc_I4_7); break; + case 8: Emit(OpCodes.Ldc_I4_8); break; + case -1: Emit(OpCodes.Ldc_I4_M1); break; + default: + if (value >= -128 && value <= 127) + { + il.Emit(OpCodes.Ldc_I4_S, (sbyte)value); + TraceCompile(OpCodes.Ldc_I4_S + ": " + value); + } + else + { + il.Emit(OpCodes.Ldc_I4, value); + TraceCompile(OpCodes.Ldc_I4 + ": " + value); + } + break; + + } + } + + MutableList locals = new MutableList(); + internal LocalBuilder GetFromPool(Type type) + { + int count = locals.Count; + for (int i = 0; i < count; i++) + { + LocalBuilder item = (LocalBuilder)locals[i]; + if (item != null && item.LocalType == type) + { + locals[i] = null; // remove from pool + return item; + } + } + LocalBuilder result = il.DeclareLocal(type); + TraceCompile("$ " + result + ": " + type); + return result; + } + + // + internal void ReleaseToPool(LocalBuilder value) + { + int count = locals.Count; + for (int i = 0; i < count; i++) + { + if (locals[i] == null) + { + locals[i] = value; // released into existing slot + return; + } + } + locals.Add(value); // create a new slot + } + + public void LoadReaderWriter() + { + Emit(isStatic ? OpCodes.Ldarg_1 : OpCodes.Ldarg_2); + } + + public void StoreValue(Local local) + { + if (local == this.InputValue) + { + byte b = isStatic ? (byte)0 : (byte)1; + il.Emit(OpCodes.Starg_S, b); + TraceCompile(OpCodes.Starg_S + ": $" + b); + } + else + { + + switch (local.Value.LocalIndex) + { + case 0: Emit(OpCodes.Stloc_0); break; + case 1: Emit(OpCodes.Stloc_1); break; + case 2: Emit(OpCodes.Stloc_2); break; + case 3: Emit(OpCodes.Stloc_3); break; + default: + + OpCode code = UseShortForm(local) ? OpCodes.Stloc_S : OpCodes.Stloc; + il.Emit(code, local.Value); + TraceCompile(code + ": $" + local.Value); + + break; + } + } + } + + public void LoadValue(Local local) + { + if (local == null) { /* nothing to do; top of stack */} + else if (local == this.InputValue) + { + Emit(isStatic ? OpCodes.Ldarg_0 : OpCodes.Ldarg_1); + } + else + { + + switch (local.Value.LocalIndex) + { + case 0: Emit(OpCodes.Ldloc_0); break; + case 1: Emit(OpCodes.Ldloc_1); break; + case 2: Emit(OpCodes.Ldloc_2); break; + case 3: Emit(OpCodes.Ldloc_3); break; + default: + + OpCode code = UseShortForm(local) ? 
OpCodes.Ldloc_S : OpCodes.Ldloc; + il.Emit(code, local.Value); + TraceCompile(code + ": $" + local.Value); + + break; + } + } + } + + public Local GetLocalWithValue(Type type, Compiler.Local fromValue) + { + if (fromValue != null) + { + if (fromValue.Type == type) return fromValue.AsCopy(); + // otherwise, load onto the stack and let the default handling (below) deal with it + LoadValue(fromValue); + if (!Helpers.IsValueType(type) && (fromValue.Type == null || !type.IsAssignableFrom(fromValue.Type))) + { // need to cast + Cast(type); + } + } + // need to store the value from the stack + Local result = new Local(this, type); + StoreValue(result); + return result; + } + + internal void EmitBasicRead(string methodName, Type expectedType) + { + MethodInfo method = MapType(typeof(ProtoReader)).GetMethod( + methodName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); + if (method == null || method.ReturnType != expectedType + || method.GetParameters().Length != 0) throw new ArgumentException("methodName"); + LoadReaderWriter(); + EmitCall(method); + } + + internal void EmitBasicRead(Type helperType, string methodName, Type expectedType) + { + MethodInfo method = helperType.GetMethod( + methodName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static); + if (method == null || method.ReturnType != expectedType + || method.GetParameters().Length != 1) throw new ArgumentException("methodName"); + LoadReaderWriter(); + EmitCall(method); + } + + internal void EmitBasicWrite(string methodName, Compiler.Local fromValue) + { + if (string.IsNullOrEmpty(methodName)) throw new ArgumentNullException("methodName"); + LoadValue(fromValue); + LoadReaderWriter(); + EmitCall(GetWriterMethod(methodName)); + } + + private MethodInfo GetWriterMethod(string methodName) + { + Type writerType = MapType(typeof(ProtoWriter)); + MethodInfo[] methods = writerType.GetMethods(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static); + foreach (MethodInfo method in methods) + { + if (method.Name != methodName) continue; + ParameterInfo[] pis = method.GetParameters(); + if (pis.Length == 2 && pis[1].ParameterType == writerType) return method; + } + throw new ArgumentException("No suitable method found for: " + methodName, "methodName"); + } + + internal void EmitWrite(Type helperType, string methodName, Compiler.Local valueFrom) + { + if (string.IsNullOrEmpty(methodName)) throw new ArgumentNullException("methodName"); + MethodInfo method = helperType.GetMethod( + methodName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static); + if (method == null || method.ReturnType != MapType(typeof(void))) throw new ArgumentException("methodName"); + LoadValue(valueFrom); + LoadReaderWriter(); + EmitCall(method); + } + + public void EmitCall(MethodInfo method) { EmitCall(method, null); } + + public void EmitCall(MethodInfo method, Type targetType) + { + Helpers.DebugAssert(method != null); + MemberInfo member = method; + CheckAccessibility(ref member); + OpCode opcode; + if (method.IsStatic || Helpers.IsValueType(method.DeclaringType)) + { + opcode = OpCodes.Call; + } + else + { + opcode = OpCodes.Callvirt; + if (targetType != null && Helpers.IsValueType(targetType) && !Helpers.IsValueType(method.DeclaringType)) + { + Constrain(targetType); + } + } + il.EmitCall(opcode, method, null); + TraceCompile(opcode + ": " + method + " on " + method.DeclaringType + (targetType == null ? "" : (" via " + targetType))); + } + + /// + /// Pushes a null reference onto the stack. 
Note that this should only + /// be used to return a null (or set a variable to null); for null-tests + /// use BranchIfTrue / BranchIfFalse. + /// + public void LoadNullRef() + { + Emit(OpCodes.Ldnull); + } + + private int nextLabel; + + internal void WriteNullCheckedTail(Type type, IProtoSerializer tail, Compiler.Local valueFrom) + { + if (Helpers.IsValueType(type)) + { + Type underlyingType = Helpers.GetUnderlyingType(type); + + if (underlyingType == null) + { // not a nullable T; can invoke directly + tail.EmitWrite(this, valueFrom); + } + else + { // nullable T; check HasValue + using (Compiler.Local valOrNull = GetLocalWithValue(type, valueFrom)) + { + LoadAddress(valOrNull, type); + LoadValue(type.GetProperty("HasValue")); + CodeLabel @end = DefineLabel(); + BranchIfFalse(@end, false); + LoadAddress(valOrNull, type); + EmitCall(type.GetMethod("GetValueOrDefault", Helpers.EmptyTypes)); + tail.EmitWrite(this, null); + MarkLabel(@end); + } + } + } + else + { // ref-type; do a null-check + LoadValue(valueFrom); + CopyValue(); + CodeLabel hasVal = DefineLabel(), @end = DefineLabel(); + BranchIfTrue(hasVal, true); + DiscardValue(); + Branch(@end, false); + MarkLabel(hasVal); + tail.EmitWrite(this, null); + MarkLabel(@end); + } + } + + internal void ReadNullCheckedTail(Type type, IProtoSerializer tail, Compiler.Local valueFrom) + { + + Type underlyingType; + + if (Helpers.IsValueType(type) && (underlyingType = Helpers.GetUnderlyingType(type)) != null) + { + if (tail.RequiresOldValue) + { + // we expect the input value to be in valueFrom; need to unpack it from T? + using (Local loc = GetLocalWithValue(type, valueFrom)) + { + LoadAddress(loc, type); + EmitCall(type.GetMethod("GetValueOrDefault", Helpers.EmptyTypes)); + } + } + else + { + Helpers.DebugAssert(valueFrom == null); // not expecting a valueFrom in this case + } + tail.EmitRead(this, null); // either unwrapped on the stack or not provided + if (tail.ReturnsValue) + { + // now re-wrap the value + EmitCtor(type, underlyingType); + } + return; + } + + // either a ref-type of a non-nullable struct; treat "as is", even if null + // (the type-serializer will handle the null case; it needs to allow null + // inputs to perform the correct type of subclass creation) + tail.EmitRead(this, valueFrom); + } + + public void EmitCtor(Type type) + { + EmitCtor(type, Helpers.EmptyTypes); + } + + public void EmitCtor(ConstructorInfo ctor) + { + if (ctor == null) throw new ArgumentNullException("ctor"); + MemberInfo ctorMember = ctor; + CheckAccessibility(ref ctorMember); + il.Emit(OpCodes.Newobj, ctor); + TraceCompile(OpCodes.Newobj + ": " + ctor.DeclaringType); + } + + public void InitLocal(Type type, Compiler.Local target) + { + LoadAddress(target, type, evenIfClass: true); // for class, initobj is a load-null, store-indirect + il.Emit(OpCodes.Initobj, type); + TraceCompile(OpCodes.Initobj + ": " + type); + } + + public void EmitCtor(Type type, params Type[] parameterTypes) + { + Helpers.DebugAssert(type != null); + Helpers.DebugAssert(parameterTypes != null); + if (Helpers.IsValueType(type) && parameterTypes.Length == 0) + { + il.Emit(OpCodes.Initobj, type); + TraceCompile(OpCodes.Initobj + ": " + type); + } + else + { + ConstructorInfo ctor = Helpers.GetConstructor(type +#if COREFX + .GetTypeInfo() +#endif + , parameterTypes, true); + if (ctor == null) throw new InvalidOperationException("No suitable constructor found for " + type.FullName); + EmitCtor(ctor); + } + } + + BasicList knownTrustedAssemblies, knownUntrustedAssemblies; + + bool 
InternalsVisible(Assembly assembly) + { + if (string.IsNullOrEmpty(assemblyName)) return false; + if (knownTrustedAssemblies != null) + { + if (knownTrustedAssemblies.IndexOfReference(assembly) >= 0) + { + return true; + } + } + if (knownUntrustedAssemblies != null) + { + if (knownUntrustedAssemblies.IndexOfReference(assembly) >= 0) + { + return false; + } + } + bool isTrusted = false; + Type attributeType = MapType(typeof(System.Runtime.CompilerServices.InternalsVisibleToAttribute)); + if (attributeType == null) return false; + +#if COREFX + foreach (System.Runtime.CompilerServices.InternalsVisibleToAttribute attrib in assembly.GetCustomAttributes(attributeType)) +#else + foreach (System.Runtime.CompilerServices.InternalsVisibleToAttribute attrib in assembly.GetCustomAttributes(attributeType, false)) +#endif + { + if (attrib.AssemblyName == assemblyName || attrib.AssemblyName.StartsWith(assemblyName + ",")) + { + isTrusted = true; + break; + } + } + + if (isTrusted) + { + if (knownTrustedAssemblies == null) knownTrustedAssemblies = new BasicList(); + knownTrustedAssemblies.Add(assembly); + } + else + { + if (knownUntrustedAssemblies == null) knownUntrustedAssemblies = new BasicList(); + knownUntrustedAssemblies.Add(assembly); + } + return isTrusted; + } + + internal void CheckAccessibility(ref MemberInfo member) + { + if (member == null) + { + throw new ArgumentNullException(nameof(member)); + } +#if !COREFX + Type type; +#endif + if (!NonPublic) + { + if (member is FieldInfo && member.Name.StartsWith("<") & member.Name.EndsWith(">k__BackingField")) + { + var propName = member.Name.Substring(1, member.Name.Length - 17); + var prop = member.DeclaringType.GetProperty(propName, BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static); + if (prop != null) member = prop; + } + bool isPublic; +#if COREFX + if (member is TypeInfo) + { + TypeInfo ti = (TypeInfo)member; + do + { + isPublic = ti.IsNestedPublic || ti.IsPublic || ((ti.IsNested || ti.IsNestedAssembly || ti.IsNestedFamORAssem) && InternalsVisible(ti.Assembly)); + } while (isPublic && ti.IsNested && (ti = ti.DeclaringType.GetTypeInfo()) != null); + } + else if (member is FieldInfo) + { + FieldInfo field = ((FieldInfo)member); + isPublic = field.IsPublic || ((field.IsAssembly || field.IsFamilyOrAssembly) && InternalsVisible(Helpers.GetAssembly(field.DeclaringType))); + } + else if (member is PropertyInfo) + { + isPublic = true; // defer to get/set + } + else if (member is ConstructorInfo) + { + ConstructorInfo ctor = ((ConstructorInfo)member); + isPublic = ctor.IsPublic || ((ctor.IsAssembly || ctor.IsFamilyOrAssembly) && InternalsVisible(Helpers.GetAssembly(ctor.DeclaringType))); + } + else if (member is MethodInfo) + { + MethodInfo method = ((MethodInfo)member); + isPublic = method.IsPublic || ((method.IsAssembly || method.IsFamilyOrAssembly) && InternalsVisible(Helpers.GetAssembly(method.DeclaringType))); + if (!isPublic) + { + // allow calls to TypeModel protected methods, and methods we are in the process of creating + if ( + member is MethodBuilder || + member.DeclaringType == MapType(typeof(TypeModel))) + isPublic = true; + } + } + else + { + throw new NotSupportedException(member.GetType().Name); + } +#else + MemberTypes memberType = member.MemberType; + switch (memberType) + { + case MemberTypes.TypeInfo: + // top-level type + type = (Type)member; + isPublic = type.IsPublic || InternalsVisible(type.Assembly); + break; + case MemberTypes.NestedType: + type = (Type)member; + do + { + isPublic = type.IsNestedPublic || 
type.IsPublic || ((type.DeclaringType == null || type.IsNestedAssembly || type.IsNestedFamORAssem) && InternalsVisible(type.Assembly)); + } while (isPublic && (type = type.DeclaringType) != null); // ^^^ !type.IsNested, but not all runtimes have that + break; + case MemberTypes.Field: + FieldInfo field = ((FieldInfo)member); + isPublic = field.IsPublic || ((field.IsAssembly || field.IsFamilyOrAssembly) && InternalsVisible(field.DeclaringType.Assembly)); + break; + case MemberTypes.Constructor: + ConstructorInfo ctor = ((ConstructorInfo)member); + isPublic = ctor.IsPublic || ((ctor.IsAssembly || ctor.IsFamilyOrAssembly) && InternalsVisible(ctor.DeclaringType.Assembly)); + break; + case MemberTypes.Method: + MethodInfo method = ((MethodInfo)member); + isPublic = method.IsPublic || ((method.IsAssembly || method.IsFamilyOrAssembly) && InternalsVisible(method.DeclaringType.Assembly)); + if (!isPublic) + { + // allow calls to TypeModel protected methods, and methods we are in the process of creating + if ( + member is MethodBuilder || + member.DeclaringType == MapType(typeof(TypeModel))) isPublic = true; + } + break; + case MemberTypes.Property: + isPublic = true; // defer to get/set + break; + default: + throw new NotSupportedException(memberType.ToString()); + } +#endif + if (!isPublic) + { +#if COREFX + if (member is TypeInfo) + { + throw new InvalidOperationException("Non-public type cannot be used with full dll compilation: " + + ((TypeInfo)member).FullName); + } + else + { + throw new InvalidOperationException("Non-public member cannot be used with full dll compilation: " + + member.DeclaringType.FullName + "." + member.Name); + } + +#else + switch (memberType) + { + case MemberTypes.TypeInfo: + case MemberTypes.NestedType: + throw new InvalidOperationException("Non-public type cannot be used with full dll compilation: " + + ((Type)member).FullName); + default: + throw new InvalidOperationException("Non-public member cannot be used with full dll compilation: " + + member.DeclaringType.FullName + "." + member.Name); + } +#endif + + } + } + } + + public void LoadValue(FieldInfo field) + { + MemberInfo member = field; + CheckAccessibility(ref member); + if (member is PropertyInfo) + { + LoadValue((PropertyInfo)member); + } + else + { + OpCode code = field.IsStatic ? OpCodes.Ldsfld : OpCodes.Ldfld; + il.Emit(code, field); + TraceCompile(code + ": " + field + " on " + field.DeclaringType); + } + } + + public void StoreValue(FieldInfo field) + { + MemberInfo member = field; + CheckAccessibility(ref member); + if (member is PropertyInfo) + { + StoreValue((PropertyInfo)member); + } + else + { + OpCode code = field.IsStatic ? 
OpCodes.Stsfld : OpCodes.Stfld; + il.Emit(code, field); + TraceCompile(code + ": " + field + " on " + field.DeclaringType); + } + } + + public void LoadValue(PropertyInfo property) + { + MemberInfo member = property; + CheckAccessibility(ref member); + EmitCall(Helpers.GetGetMethod(property, true, true)); + } + + public void StoreValue(PropertyInfo property) + { + MemberInfo member = property; + CheckAccessibility(ref member); + EmitCall(Helpers.GetSetMethod(property, true, true)); + } + + //internal void EmitInstance() + //{ + // if (isStatic) throw new InvalidOperationException(); + // Emit(OpCodes.Ldarg_0); + //} + + internal static void LoadValue(ILGenerator il, int value) + { + switch (value) + { + case 0: il.Emit(OpCodes.Ldc_I4_0); break; + case 1: il.Emit(OpCodes.Ldc_I4_1); break; + case 2: il.Emit(OpCodes.Ldc_I4_2); break; + case 3: il.Emit(OpCodes.Ldc_I4_3); break; + case 4: il.Emit(OpCodes.Ldc_I4_4); break; + case 5: il.Emit(OpCodes.Ldc_I4_5); break; + case 6: il.Emit(OpCodes.Ldc_I4_6); break; + case 7: il.Emit(OpCodes.Ldc_I4_7); break; + case 8: il.Emit(OpCodes.Ldc_I4_8); break; + case -1: il.Emit(OpCodes.Ldc_I4_M1); break; + default: il.Emit(OpCodes.Ldc_I4, value); break; + } + } + + private bool UseShortForm(Local local) + { + return local.Value.LocalIndex < 256; + } + + internal void LoadAddress(Local local, Type type, bool evenIfClass = false) + { + if (evenIfClass || Helpers.IsValueType(type)) + { + if (local == null) + { + throw new InvalidOperationException("Cannot load the address of the head of the stack"); + } + + if (local == this.InputValue) + { + il.Emit(OpCodes.Ldarga_S, (isStatic ? (byte)0 : (byte)1)); + TraceCompile(OpCodes.Ldarga_S + ": $" + (isStatic ? 0 : 1)); + } + else + { + OpCode code = UseShortForm(local) ? OpCodes.Ldloca_S : OpCodes.Ldloca; + il.Emit(code, local.Value); + TraceCompile(code + ": $" + local.Value); + } + + } + else + { // reference-type; already *is* the address; just load it + LoadValue(local); + } + } + + internal void Branch(CodeLabel label, bool @short) + { + OpCode code = @short ? OpCodes.Br_S : OpCodes.Br; + il.Emit(code, label.Value); + TraceCompile(code + ": " + label.Index); + } + + internal void BranchIfFalse(CodeLabel label, bool @short) + { + OpCode code = @short ? OpCodes.Brfalse_S : OpCodes.Brfalse; + il.Emit(code, label.Value); + TraceCompile(code + ": " + label.Index); + } + + internal void BranchIfTrue(CodeLabel label, bool @short) + { + OpCode code = @short ? OpCodes.Brtrue_S : OpCodes.Brtrue; + il.Emit(code, label.Value); + TraceCompile(code + ": " + label.Index); + } + + internal void BranchIfEqual(CodeLabel label, bool @short) + { + OpCode code = @short ? OpCodes.Beq_S : OpCodes.Beq; + il.Emit(code, label.Value); + TraceCompile(code + ": " + label.Index); + } + + //internal void TestEqual() + //{ + // Emit(OpCodes.Ceq); + //} + + internal void CopyValue() + { + Emit(OpCodes.Dup); + } + + internal void BranchIfGreater(CodeLabel label, bool @short) + { + OpCode code = @short ? OpCodes.Bgt_S : OpCodes.Bgt; + il.Emit(code, label.Value); + TraceCompile(code + ": " + label.Index); + } + + internal void BranchIfLess(CodeLabel label, bool @short) + { + OpCode code = @short ? 
OpCodes.Blt_S : OpCodes.Blt; + il.Emit(code, label.Value); + TraceCompile(code + ": " + label.Index); + } + + internal void DiscardValue() + { + Emit(OpCodes.Pop); + } + + public void Subtract() + { + Emit(OpCodes.Sub); + } + + public void Switch(CodeLabel[] jumpTable) + { + const int MAX_JUMPS = 128; + + if (jumpTable.Length <= MAX_JUMPS) + { + // simple case + Label[] labels = new Label[jumpTable.Length]; + for (int i = 0; i < labels.Length; i++) + { + labels[i] = jumpTable[i].Value; + } + TraceCompile(OpCodes.Switch.ToString()); + il.Emit(OpCodes.Switch, labels); + } + else + { + // too many to jump easily (especially on Android) - need to split up (note: uses a local pulled from the stack) + using (Local val = GetLocalWithValue(MapType(typeof(int)), null)) + { + int count = jumpTable.Length, offset = 0; + int blockCount = count / MAX_JUMPS; + if ((count % MAX_JUMPS) != 0) blockCount++; + + Label[] blockLabels = new Label[blockCount]; + for (int i = 0; i < blockCount; i++) + { + blockLabels[i] = il.DefineLabel(); + } + CodeLabel endOfSwitch = DefineLabel(); + + LoadValue(val); + LoadValue(MAX_JUMPS); + Emit(OpCodes.Div); + TraceCompile(OpCodes.Switch.ToString()); + il.Emit(OpCodes.Switch, blockLabels); + Branch(endOfSwitch, false); + + Label[] innerLabels = new Label[MAX_JUMPS]; + for (int blockIndex = 0; blockIndex < blockCount; blockIndex++) + { + il.MarkLabel(blockLabels[blockIndex]); + + int itemsThisBlock = Math.Min(MAX_JUMPS, count); + count -= itemsThisBlock; + if (innerLabels.Length != itemsThisBlock) innerLabels = new Label[itemsThisBlock]; + + int subtract = offset; + for (int j = 0; j < itemsThisBlock; j++) + { + innerLabels[j] = jumpTable[offset++].Value; + } + LoadValue(val); + if (subtract != 0) // switches are always zero-based + { + LoadValue(subtract); + Emit(OpCodes.Sub); + } + TraceCompile(OpCodes.Switch.ToString()); + il.Emit(OpCodes.Switch, innerLabels); + if (count != 0) + { // force default to the very bottom + Branch(endOfSwitch, false); + } + } + Helpers.DebugAssert(count == 0, "Should use exactly all switch items"); + MarkLabel(endOfSwitch); + } + } + } + + internal void EndFinally() + { + il.EndExceptionBlock(); + TraceCompile("EndExceptionBlock"); + } + + internal void BeginFinally() + { + il.BeginFinallyBlock(); + TraceCompile("BeginFinallyBlock"); + } + + internal void EndTry(CodeLabel label, bool @short) + { + OpCode code = @short ? OpCodes.Leave_S : OpCodes.Leave; + il.Emit(code, label.Value); + TraceCompile(code + ": " + label.Index); + } + + internal CodeLabel BeginTry() + { + CodeLabel label = new CodeLabel(il.BeginExceptionBlock(), nextLabel++); + TraceCompile("BeginExceptionBlock: " + label.Index); + return label; + } + + internal void Constrain(Type type) + { + il.Emit(OpCodes.Constrained, type); + TraceCompile(OpCodes.Constrained + ": " + type); + } + + internal void TryCast(Type type) + { + il.Emit(OpCodes.Isinst, type); + TraceCompile(OpCodes.Isinst + ": " + type); + } + + internal void Cast(Type type) + { + il.Emit(OpCodes.Castclass, type); + TraceCompile(OpCodes.Castclass + ": " + type); + } + + public IDisposable Using(Local local) + { + return new UsingBlock(this, local); + } + + private sealed class UsingBlock : IDisposable + { + private Local local; + CompilerContext ctx; + CodeLabel label; + /// + /// Creates a new "using" block (equivalent) around a variable; + /// the variable must exist, and note that (unlike in C#) it is + /// the variables *final* value that gets disposed. 
If you need + /// *original* disposal, copy your variable first. + /// + /// It is the callers responsibility to ensure that the variable's + /// scope fully-encapsulates the "using"; if not, the variable + /// may be re-used (and thus re-assigned) unexpectedly. + /// + public UsingBlock(CompilerContext ctx, Local local) + { + if (ctx == null) throw new ArgumentNullException("ctx"); + if (local == null) throw new ArgumentNullException("local"); + + Type type = local.Type; + // check if **never** disposable + if ((Helpers.IsValueType(type) || Helpers.IsSealed(type)) && + !ctx.MapType(typeof(IDisposable)).IsAssignableFrom(type)) + { + return; // nothing to do! easiest "using" block ever + // (note that C# wouldn't allow this as a "using" block, + // but we'll be generous and simply not do anything) + } + this.local = local; + this.ctx = ctx; + label = ctx.BeginTry(); + + } + public void Dispose() + { + if (local == null || ctx == null) return; + + ctx.EndTry(label, false); + ctx.BeginFinally(); + Type disposableType = ctx.MapType(typeof(IDisposable)); + MethodInfo dispose = disposableType.GetMethod("Dispose"); + Type type = local.Type; + // remember that we've already (in the .ctor) excluded the case + // where it *cannot* be disposable + if (Helpers.IsValueType(type)) + { + ctx.LoadAddress(local, type); + switch (ctx.MetadataVersion) + { + case ILVersion.Net1: + ctx.LoadValue(local); + ctx.CastToObject(type); + break; + default: + ctx.Constrain(type); + break; + } + ctx.EmitCall(dispose); + } + else + { + Compiler.CodeLabel @null = ctx.DefineLabel(); + if (disposableType.IsAssignableFrom(type)) + { // *known* to be IDisposable; just needs a null-check + ctx.LoadValue(local); + ctx.BranchIfFalse(@null, true); + ctx.LoadAddress(local, type); + } + else + { // *could* be IDisposable; test via "as" + using (Compiler.Local disp = new Compiler.Local(ctx, disposableType)) + { + ctx.LoadValue(local); + ctx.TryCast(disposableType); + ctx.CopyValue(); + ctx.StoreValue(disp); + ctx.BranchIfFalse(@null, true); + ctx.LoadAddress(disp, disposableType); + } + } + ctx.EmitCall(dispose); + ctx.MarkLabel(@null); + } + ctx.EndFinally(); + this.local = null; + this.ctx = null; + label = new CodeLabel(); // default + } + } + + internal void Add() + { + Emit(OpCodes.Add); + } + + internal void LoadLength(Local arr, bool zeroIfNull) + { + Helpers.DebugAssert(arr.Type.IsArray && arr.Type.GetArrayRank() == 1); + + if (zeroIfNull) + { + Compiler.CodeLabel notNull = DefineLabel(), done = DefineLabel(); + LoadValue(arr); + CopyValue(); // optimised for non-null case + BranchIfTrue(notNull, true); + DiscardValue(); + LoadValue(0); + Branch(done, true); + MarkLabel(notNull); + Emit(OpCodes.Ldlen); + Emit(OpCodes.Conv_I4); + MarkLabel(done); + } + else + { + LoadValue(arr); + Emit(OpCodes.Ldlen); + Emit(OpCodes.Conv_I4); + } + } + + internal void CreateArray(Type elementType, Local length) + { + LoadValue(length); + il.Emit(OpCodes.Newarr, elementType); + TraceCompile(OpCodes.Newarr + ": " + elementType); + } + + internal void LoadArrayValue(Local arr, Local i) + { + Type type = arr.Type; + Helpers.DebugAssert(type.IsArray && arr.Type.GetArrayRank() == 1); + type = type.GetElementType(); + Helpers.DebugAssert(type != null, "Not an array: " + arr.Type.FullName); + LoadValue(arr); + LoadValue(i); + switch (Helpers.GetTypeCode(type)) + { + case ProtoTypeCode.SByte: Emit(OpCodes.Ldelem_I1); break; + case ProtoTypeCode.Int16: Emit(OpCodes.Ldelem_I2); break; + case ProtoTypeCode.Int32: Emit(OpCodes.Ldelem_I4); break; + case 
ProtoTypeCode.Int64: Emit(OpCodes.Ldelem_I8); break; + + case ProtoTypeCode.Byte: Emit(OpCodes.Ldelem_U1); break; + case ProtoTypeCode.UInt16: Emit(OpCodes.Ldelem_U2); break; + case ProtoTypeCode.UInt32: Emit(OpCodes.Ldelem_U4); break; + case ProtoTypeCode.UInt64: Emit(OpCodes.Ldelem_I8); break; // odd, but this is what C# does... + + case ProtoTypeCode.Single: Emit(OpCodes.Ldelem_R4); break; + case ProtoTypeCode.Double: Emit(OpCodes.Ldelem_R8); break; + default: + if (Helpers.IsValueType(type)) + { + il.Emit(OpCodes.Ldelema, type); + il.Emit(OpCodes.Ldobj, type); + TraceCompile(OpCodes.Ldelema + ": " + type); + TraceCompile(OpCodes.Ldobj + ": " + type); + } + else + { + Emit(OpCodes.Ldelem_Ref); + } + + break; + } + } + + internal void LoadValue(Type type) + { + il.Emit(OpCodes.Ldtoken, type); + TraceCompile(OpCodes.Ldtoken + ": " + type); + EmitCall(MapType(typeof(System.Type)).GetMethod("GetTypeFromHandle")); + } + + internal void ConvertToInt32(ProtoTypeCode typeCode, bool uint32Overflow) + { + switch (typeCode) + { + case ProtoTypeCode.Byte: + case ProtoTypeCode.SByte: + case ProtoTypeCode.Int16: + case ProtoTypeCode.UInt16: + Emit(OpCodes.Conv_I4); + break; + case ProtoTypeCode.Int32: + break; + case ProtoTypeCode.Int64: + Emit(OpCodes.Conv_Ovf_I4); + break; + case ProtoTypeCode.UInt32: + Emit(uint32Overflow ? OpCodes.Conv_Ovf_I4_Un : OpCodes.Conv_Ovf_I4); + break; + case ProtoTypeCode.UInt64: + Emit(OpCodes.Conv_Ovf_I4_Un); + break; + default: + throw new InvalidOperationException("ConvertToInt32 not implemented for: " + typeCode.ToString()); + } + } + + internal void ConvertFromInt32(ProtoTypeCode typeCode, bool uint32Overflow) + { + switch (typeCode) + { + case ProtoTypeCode.SByte: Emit(OpCodes.Conv_Ovf_I1); break; + case ProtoTypeCode.Byte: Emit(OpCodes.Conv_Ovf_U1); break; + case ProtoTypeCode.Int16: Emit(OpCodes.Conv_Ovf_I2); break; + case ProtoTypeCode.UInt16: Emit(OpCodes.Conv_Ovf_U2); break; + case ProtoTypeCode.Int32: break; + case ProtoTypeCode.UInt32: Emit(uint32Overflow ? 
OpCodes.Conv_Ovf_U4 : OpCodes.Conv_U4); break; + case ProtoTypeCode.Int64: Emit(OpCodes.Conv_I8); break; + case ProtoTypeCode.UInt64: Emit(OpCodes.Conv_U8); break; + default: throw new InvalidOperationException(); + } + } + + internal void LoadValue(decimal value) + { + if (value == 0M) + { + LoadValue(typeof(decimal).GetField("Zero")); + } + else + { + int[] bits = decimal.GetBits(value); + LoadValue(bits[0]); // lo + LoadValue(bits[1]); // mid + LoadValue(bits[2]); // hi + LoadValue((int)(((uint)bits[3]) >> 31)); // isNegative (bool, but int for CLI purposes) + LoadValue((bits[3] >> 16) & 0xFF); // scale (byte, but int for CLI purposes) + + EmitCtor(MapType(typeof(decimal)), new Type[] { MapType(typeof(int)), MapType(typeof(int)), MapType(typeof(int)), MapType(typeof(bool)), MapType(typeof(byte)) }); + } + } + + internal void LoadValue(Guid value) + { + if (value == Guid.Empty) + { + LoadValue(typeof(Guid).GetField("Empty")); + } + else + { // note we're adding lots of shorts/bytes here - but at the IL level they are I4, not I1/I2 (which barely exist) + byte[] bytes = value.ToByteArray(); + int i = (bytes[0]) | (bytes[1] << 8) | (bytes[2] << 16) | (bytes[3] << 24); + LoadValue(i); + short s = (short)((bytes[4]) | (bytes[5] << 8)); + LoadValue(s); + s = (short)((bytes[6]) | (bytes[7] << 8)); + LoadValue(s); + for (i = 8; i <= 15; i++) + { + LoadValue(bytes[i]); + } + EmitCtor(MapType(typeof(Guid)), new Type[] { MapType(typeof(int)), MapType(typeof(short)), MapType(typeof(short)), + MapType(typeof(byte)), MapType(typeof(byte)), MapType(typeof(byte)), MapType(typeof(byte)), MapType(typeof(byte)), MapType(typeof(byte)), MapType(typeof(byte)), MapType(typeof(byte)) }); + } + } + + //internal void LoadValue(bool value) + //{ + // Emit(value ? OpCodes.Ldc_I4_1 : OpCodes.Ldc_I4_0); + //} + + internal void LoadSerializationContext() + { + LoadReaderWriter(); + LoadValue((isWriter ? typeof(ProtoWriter) : typeof(ProtoReader)).GetProperty("Context")); + } + + private readonly TypeModel model; + + internal Type MapType(Type type) + { + return model.MapType(type); + } + + private readonly ILVersion metadataVersion; + public ILVersion MetadataVersion { get { return metadataVersion; } } + public enum ILVersion + { + Net1, Net2 + } + + internal bool AllowInternal(PropertyInfo property) + { + return NonPublic ? 
true : InternalsVisible(Helpers.GetAssembly(property.DeclaringType)); + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Compiler/CompilerContext.cs.meta b/Runtime/Protobuf-net/Compiler/CompilerContext.cs.meta new file mode 100644 index 0000000..b40174b --- /dev/null +++ b/Runtime/Protobuf-net/Compiler/CompilerContext.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a58d20a1d8c7730499ef29a11532d07e +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Compiler/CompilerDelegates.cs b/Runtime/Protobuf-net/Compiler/CompilerDelegates.cs new file mode 100644 index 0000000..e7f0508 --- /dev/null +++ b/Runtime/Protobuf-net/Compiler/CompilerDelegates.cs @@ -0,0 +1,7 @@ +#if FEAT_COMPILER +namespace ProtoBuf.Compiler +{ + internal delegate void ProtoSerializer(object value, ProtoWriter dest); + internal delegate object ProtoDeserializer(object value, ProtoReader source); +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Compiler/CompilerDelegates.cs.meta b/Runtime/Protobuf-net/Compiler/CompilerDelegates.cs.meta new file mode 100644 index 0000000..c9fedb0 --- /dev/null +++ b/Runtime/Protobuf-net/Compiler/CompilerDelegates.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 3b923d7ab8e95f740b059ca797596261 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Compiler/Local.cs b/Runtime/Protobuf-net/Compiler/Local.cs new file mode 100644 index 0000000..fd3dfa9 --- /dev/null +++ b/Runtime/Protobuf-net/Compiler/Local.cs @@ -0,0 +1,58 @@ +#if FEAT_COMPILER +using System; +using System.Reflection.Emit; + +namespace ProtoBuf.Compiler +{ + internal sealed class Local : IDisposable + { + // public static readonly Local InputValue = new Local(null, null); + private LocalBuilder value; + private readonly Type type; + private CompilerContext ctx; + + private Local(LocalBuilder value, Type type) + { + this.value = value; + this.type = type; + } + + internal Local(CompilerContext ctx, Type type) + { + this.ctx = ctx; + if (ctx != null) { value = ctx.GetFromPool(type); } + this.type = type; + } + + internal LocalBuilder Value => value ?? 
throw new ObjectDisposedException(GetType().Name); + + public Type Type => type; + + public Local AsCopy() + { + if (ctx == null) return this; // can re-use if context-free + return new Local(value, this.type); + } + + public void Dispose() + { + if (ctx != null) + { + // only *actually* dispose if this is context-bound; note that non-bound + // objects are cheekily re-used, and *must* be left intact agter a "using" etc + ctx.ReleaseToPool(value); + value = null; + ctx = null; + } + } + + internal bool IsSame(Local other) + { + if((object)this == (object)other) return true; + + object ourVal = value; // use prop to ensure obj-disposed etc + return other != null && ourVal == (object)(other.value); + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Compiler/Local.cs.meta b/Runtime/Protobuf-net/Compiler/Local.cs.meta new file mode 100644 index 0000000..2767c29 --- /dev/null +++ b/Runtime/Protobuf-net/Compiler/Local.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 07d12d9a9b7d45b498e28b7c39bdca01 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/DataFormat.cs b/Runtime/Protobuf-net/DataFormat.cs new file mode 100644 index 0000000..4d97b4f --- /dev/null +++ b/Runtime/Protobuf-net/DataFormat.cs @@ -0,0 +1,49 @@ + +namespace ProtoBuf +{ + /// + /// Sub-format to use when serializing/deserializing data + /// + public enum DataFormat + { + /// + /// Uses the default encoding for the data-type. + /// + Default, + + /// + /// When applied to signed integer-based data (including Decimal), this + /// indicates that zigzag variant encoding will be used. This means that values + /// with small magnitude (regardless of sign) take a small amount + /// of space to encode. + /// + ZigZag, + + /// + /// When applied to signed integer-based data (including Decimal), this + /// indicates that two's-complement variant encoding will be used. + /// This means that any -ve number will take 10 bytes (even for 32-bit), + /// so should only be used for compatibility. + /// + TwosComplement, + + /// + /// When applied to signed integer-based data (including Decimal), this + /// indicates that a fixed amount of space will be used. + /// + FixedSize, + + /// + /// When applied to a sub-message, indicates that the value should be treated + /// as group-delimited. 
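+ /// <example>
+ /// The format is normally selected per member via ProtoMemberAttribute (the member and type shown are
+ /// illustrative only):
+ /// <code>
+ /// [ProtoMember(2, DataFormat = DataFormat.Group)]
+ /// public Address HomeAddress { get; set; }
+ /// </code>
+ /// </example>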
+ /// + Group, + + /// + /// When applied to members of types such as DateTime or TimeSpan, specifies + /// that the "well known" standardized representation should be use; DateTime uses Timestamp, + /// + /// + WellKnown + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/DataFormat.cs.meta b/Runtime/Protobuf-net/DataFormat.cs.meta new file mode 100644 index 0000000..644abad --- /dev/null +++ b/Runtime/Protobuf-net/DataFormat.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 875f2f7de4b03ff409de70d226359e8f +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/DiscriminatedUnion.Serializable.cs b/Runtime/Protobuf-net/DiscriminatedUnion.Serializable.cs new file mode 100644 index 0000000..0fd671f --- /dev/null +++ b/Runtime/Protobuf-net/DiscriminatedUnion.Serializable.cs @@ -0,0 +1,176 @@ +#if PLAT_BINARYFORMATTER +using System; +using System.Runtime.InteropServices; +using System.Runtime.Serialization; + +namespace ProtoBuf +{ + [Serializable] + public readonly partial struct DiscriminatedUnionObject : ISerializable + { + void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) + { + if (Discriminator != default) info.AddValue("d", Discriminator); + if (Object is object) info.AddValue("o", Object); + } + private DiscriminatedUnionObject(SerializationInfo info, StreamingContext context) + { + this = default; + foreach (var field in info) + { + switch (field.Name) + { + case "d": Discriminator = (int)field.Value; break; + case "o": Object = field.Value; break; + } + } + } + } + + [Serializable] + public readonly partial struct DiscriminatedUnion128Object : ISerializable + { + [FieldOffset(8)] private readonly long _lo; + [FieldOffset(16)] private readonly long _hi; + void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) + { + if (_discriminator != default) info.AddValue("d", _discriminator); + if (_lo != default) info.AddValue("l", _lo); + if (_hi != default) info.AddValue("h", _hi); + if (Object != null) info.AddValue("o", Object); + } + private DiscriminatedUnion128Object(SerializationInfo info, StreamingContext context) + { + this = default; + foreach (var field in info) + { + switch (field.Name) + { + case "d": _discriminator = (int)field.Value; break; + case "l": _lo = (long)field.Value; break; + case "h": _hi = (long)field.Value; break; + case "o": Object = field.Value; break; + } + } + } + } + + [Serializable] + public readonly partial struct DiscriminatedUnion128 : ISerializable + { + [FieldOffset(8)] private readonly long _lo; + [FieldOffset(16)] private readonly long _hi; + void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) + { + if (_discriminator != default) info.AddValue("d", _discriminator); + if (_lo != default) info.AddValue("l", _lo); + if (_hi != default) info.AddValue("h", _hi); + } + private DiscriminatedUnion128(SerializationInfo info, StreamingContext context) + { + this = default; + foreach (var field in info) + { + switch (field.Name) + { + case "d": _discriminator = (int)field.Value; break; + case "l": _lo = (long)field.Value; break; + case "h": _hi = (long)field.Value; break; + } + } + } + } + + [Serializable] + public readonly partial struct DiscriminatedUnion64 : ISerializable + { + void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) + { + if (_discriminator != default) 
info.AddValue("d", _discriminator); + if (Int64 != default) info.AddValue("i", Int64); + } + private DiscriminatedUnion64(SerializationInfo info, StreamingContext context) + { + this = default; + foreach (var field in info) + { + switch (field.Name) + { + case "d": _discriminator = (int)field.Value; break; + case "i": Int64 = (long)field.Value; break; + } + } + } + } + + [Serializable] + public readonly partial struct DiscriminatedUnion64Object : ISerializable + { + void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) + { + if (_discriminator != default) info.AddValue("d", _discriminator); + if (Int64 != default) info.AddValue("i", Int64); + if (Object is object) info.AddValue("o", Object); + } + private DiscriminatedUnion64Object(SerializationInfo info, StreamingContext context) + { + this = default; + foreach (var field in info) + { + switch (field.Name) + { + case "d": _discriminator = (int)field.Value; break; + case "i": Int64 = (long)field.Value; break; + case "o": Object = field.Value; break; + } + } + } + } + + [Serializable] + public readonly partial struct DiscriminatedUnion32 : ISerializable + { + void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) + { + if (_discriminator != default) info.AddValue("d", _discriminator); + if (Int32 != default) info.AddValue("i", Int32); + } + private DiscriminatedUnion32(SerializationInfo info, StreamingContext context) + { + this = default; + foreach (var field in info) + { + switch (field.Name) + { + case "d": _discriminator = (int)field.Value; break; + case "i": Int32 = (int)field.Value; break; + } + } + } + } + + [Serializable] + public readonly partial struct DiscriminatedUnion32Object : ISerializable + { + void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) + { + if (_discriminator != default) info.AddValue("d", _discriminator); + if (Int32 != default) info.AddValue("i", Int32); + if (Object is object) info.AddValue("o", Object); + } + private DiscriminatedUnion32Object(SerializationInfo info, StreamingContext context) + { + this = default; + foreach (var field in info) + { + switch (field.Name) + { + case "d": _discriminator = (int)field.Value; break; + case "i": Int32 = (int)field.Value; break; + case "o": Object = field.Value; break; + } + } + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/DiscriminatedUnion.Serializable.cs.meta b/Runtime/Protobuf-net/DiscriminatedUnion.Serializable.cs.meta new file mode 100644 index 0000000..f616331 --- /dev/null +++ b/Runtime/Protobuf-net/DiscriminatedUnion.Serializable.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 7a3aeec9c8a4c734e9ad022627502d1d +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/DiscriminatedUnion.cs b/Runtime/Protobuf-net/DiscriminatedUnion.cs new file mode 100644 index 0000000..7cc8cf8 --- /dev/null +++ b/Runtime/Protobuf-net/DiscriminatedUnion.cs @@ -0,0 +1,416 @@ +using System; +using System.Runtime.InteropServices; + +namespace ProtoBuf +{ + /// Represent multiple types as a union; this is used as part of OneOf - + /// note that it is the caller's responsbility to only read/write the value as the same type + public readonly partial struct DiscriminatedUnionObject + { + + /// The value typed as Object + public readonly object Object; + + /// Indicates whether the specified discriminator is 
assigned + public bool Is(int discriminator) => Discriminator == discriminator; + + /// Create a new discriminated union value + public DiscriminatedUnionObject(int discriminator, object value) + { + Discriminator = discriminator; + Object = value; + } + + /// Reset a value if the specified discriminator is assigned + public static void Reset(ref DiscriminatedUnionObject value, int discriminator) + { + if (value.Discriminator == discriminator) value = default; + } + + /// The discriminator value + public int Discriminator { get; } + } + + /// Represent multiple types as a union; this is used as part of OneOf - + /// note that it is the caller's responsbility to only read/write the value as the same type + [StructLayout(LayoutKind.Explicit)] + public readonly partial struct DiscriminatedUnion64 + { +#if !FEAT_SAFE + unsafe static DiscriminatedUnion64() + { + if (sizeof(DateTime) > 8) throw new InvalidOperationException(nameof(DateTime) + " was unexpectedly too big for " + nameof(DiscriminatedUnion64)); + if (sizeof(TimeSpan) > 8) throw new InvalidOperationException(nameof(TimeSpan) + " was unexpectedly too big for " + nameof(DiscriminatedUnion64)); + } +#endif + [FieldOffset(0)] private readonly int _discriminator; // note that we can't pack further because Object needs x8 alignment/padding on x64 + + /// The value typed as Int64 + [FieldOffset(8)] public readonly long Int64; + /// The value typed as UInt64 + [FieldOffset(8)] public readonly ulong UInt64; + /// The value typed as Int32 + [FieldOffset(8)] public readonly int Int32; + /// The value typed as UInt32 + [FieldOffset(8)] public readonly uint UInt32; + /// The value typed as Boolean + [FieldOffset(8)] public readonly bool Boolean; + /// The value typed as Single + [FieldOffset(8)] public readonly float Single; + /// The value typed as Double + [FieldOffset(8)] public readonly double Double; + /// The value typed as DateTime + [FieldOffset(8)] public readonly DateTime DateTime; + /// The value typed as TimeSpan + [FieldOffset(8)] public readonly TimeSpan TimeSpan; + + private DiscriminatedUnion64(int discriminator) : this() + { + _discriminator = discriminator; + } + + /// Indicates whether the specified discriminator is assigned + public bool Is(int discriminator) => _discriminator == discriminator; + + /// Create a new discriminated union value + public DiscriminatedUnion64(int discriminator, long value) : this(discriminator) { Int64 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64(int discriminator, int value) : this(discriminator) { Int32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64(int discriminator, ulong value) : this(discriminator) { UInt64 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64(int discriminator, uint value) : this(discriminator) { UInt32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64(int discriminator, float value) : this(discriminator) { Single = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64(int discriminator, double value) : this(discriminator) { Double = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64(int discriminator, bool value) : this(discriminator) { Boolean = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64(int discriminator, DateTime? value) : this(value.HasValue ? 
discriminator: 0) { DateTime = value.GetValueOrDefault(); } + /// Create a new discriminated union value + public DiscriminatedUnion64(int discriminator, TimeSpan? value) : this(value.HasValue ? discriminator : 0) { TimeSpan = value.GetValueOrDefault(); } + + /// Reset a value if the specified discriminator is assigned + public static void Reset(ref DiscriminatedUnion64 value, int discriminator) + { + if (value.Discriminator == discriminator) value = default; + } + /// The discriminator value + public int Discriminator => _discriminator; + } + + /// Represent multiple types as a union; this is used as part of OneOf - + /// note that it is the caller's responsbility to only read/write the value as the same type + [StructLayout(LayoutKind.Explicit)] + public readonly partial struct DiscriminatedUnion128Object + { +#if !FEAT_SAFE + unsafe static DiscriminatedUnion128Object() + { + if (sizeof(DateTime) > 16) throw new InvalidOperationException(nameof(DateTime) + " was unexpectedly too big for " + nameof(DiscriminatedUnion128Object)); + if (sizeof(TimeSpan) > 16) throw new InvalidOperationException(nameof(TimeSpan) + " was unexpectedly too big for " + nameof(DiscriminatedUnion128Object)); + if (sizeof(Guid) > 16) throw new InvalidOperationException(nameof(Guid) + " was unexpectedly too big for " + nameof(DiscriminatedUnion128Object)); + } +#endif + + [FieldOffset(0)] private readonly int _discriminator; // note that we can't pack further because Object needs x8 alignment/padding on x64 + + /// The value typed as Int64 + [FieldOffset(8)] public readonly long Int64; + /// The value typed as UInt64 + [FieldOffset(8)] public readonly ulong UInt64; + /// The value typed as Int32 + [FieldOffset(8)] public readonly int Int32; + /// The value typed as UInt32 + [FieldOffset(8)] public readonly uint UInt32; + /// The value typed as Boolean + [FieldOffset(8)] public readonly bool Boolean; + /// The value typed as Single + [FieldOffset(8)] public readonly float Single; + /// The value typed as Double + [FieldOffset(8)] public readonly double Double; + /// The value typed as DateTime + [FieldOffset(8)] public readonly DateTime DateTime; + /// The value typed as TimeSpan + [FieldOffset(8)] public readonly TimeSpan TimeSpan; + /// The value typed as Guid + [FieldOffset(8)] public readonly Guid Guid; + /// The value typed as Object + [FieldOffset(24)] public readonly object Object; + + private DiscriminatedUnion128Object(int discriminator) : this() + { + _discriminator = discriminator; + } + + /// Indicates whether the specified discriminator is assigned + public bool Is(int discriminator) => _discriminator == discriminator; + + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, long value) : this(discriminator) { Int64 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, int value) : this(discriminator) { Int32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, ulong value) : this(discriminator) { UInt64 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, uint value) : this(discriminator) { UInt32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, float value) : this(discriminator) { Single = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, double value) : 
this(discriminator) { Double = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, bool value) : this(discriminator) { Boolean = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, object value) : this(value != null ? discriminator : 0) { Object = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, DateTime? value) : this(value.HasValue ? discriminator: 0) { DateTime = value.GetValueOrDefault(); } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, TimeSpan? value) : this(value.HasValue ? discriminator : 0) { TimeSpan = value.GetValueOrDefault(); } + /// Create a new discriminated union value + public DiscriminatedUnion128Object(int discriminator, Guid? value) : this(value.HasValue ? discriminator : 0) { Guid = value.GetValueOrDefault(); } + + /// Reset a value if the specified discriminator is assigned + public static void Reset(ref DiscriminatedUnion128Object value, int discriminator) + { + if (value.Discriminator == discriminator) value = default; + } + /// The discriminator value + public int Discriminator => _discriminator; + } + + /// Represent multiple types as a union; this is used as part of OneOf - + /// note that it is the caller's responsbility to only read/write the value as the same type + [StructLayout(LayoutKind.Explicit)] + public readonly partial struct DiscriminatedUnion128 + { +#if !FEAT_SAFE + unsafe static DiscriminatedUnion128() + { + if (sizeof(DateTime) > 16) throw new InvalidOperationException(nameof(DateTime) + " was unexpectedly too big for " + nameof(DiscriminatedUnion128)); + if (sizeof(TimeSpan) > 16) throw new InvalidOperationException(nameof(TimeSpan) + " was unexpectedly too big for " + nameof(DiscriminatedUnion128)); + if (sizeof(Guid) > 16) throw new InvalidOperationException(nameof(Guid) + " was unexpectedly too big for " + nameof(DiscriminatedUnion128)); + } +#endif + [FieldOffset(0)] private readonly int _discriminator; // note that we can't pack further because Object needs x8 alignment/padding on x64 + + /// The value typed as Int64 + [FieldOffset(8)] public readonly long Int64; + /// The value typed as UInt64 + [FieldOffset(8)] public readonly ulong UInt64; + /// The value typed as Int32 + [FieldOffset(8)] public readonly int Int32; + /// The value typed as UInt32 + [FieldOffset(8)] public readonly uint UInt32; + /// The value typed as Boolean + [FieldOffset(8)] public readonly bool Boolean; + /// The value typed as Single + [FieldOffset(8)] public readonly float Single; + /// The value typed as Double + [FieldOffset(8)] public readonly double Double; + /// The value typed as DateTime + [FieldOffset(8)] public readonly DateTime DateTime; + /// The value typed as TimeSpan + [FieldOffset(8)] public readonly TimeSpan TimeSpan; + /// The value typed as Guid + [FieldOffset(8)] public readonly Guid Guid; + + private DiscriminatedUnion128(int discriminator) : this() + { + _discriminator = discriminator; + } + + /// Indicates whether the specified discriminator is assigned + public bool Is(int discriminator) => _discriminator == discriminator; + + /// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, long value) : this(discriminator) { Int64 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, int value) : this(discriminator) { Int32 = value; } + 
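// Illustrative aside (not part of this file): a hedged sketch of how generated "oneof"
// code is expected to drive one of these unions. The backing field, property names and
// field numbers below are hypothetical.
//
//     private DiscriminatedUnion64 __pbn_value;   // backs a oneof with field 1 (int64) and field 2 (double)
//
//     public long Ticks
//     {
//         get => __pbn_value.Is(1) ? __pbn_value.Int64 : default;
//         set => __pbn_value = new DiscriminatedUnion64(1, value);
//     }
//
//     public double Seconds
//     {
//         get => __pbn_value.Is(2) ? __pbn_value.Double : default;
//         set => __pbn_value = new DiscriminatedUnion64(2, value);
//     }
//
//     public void ClearTicks() => DiscriminatedUnion64.Reset(ref __pbn_value, 1);
//
// Because every value member shares the same FieldOffset, only the member matching the
// current discriminator is meaningful; reading any other member reinterprets the raw bits,
// which is the caller responsibility called out in the remarks above.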
/// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, ulong value) : this(discriminator) { UInt64 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, uint value) : this(discriminator) { UInt32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, float value) : this(discriminator) { Single = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, double value) : this(discriminator) { Double = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, bool value) : this(discriminator) { Boolean = value; } + /// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, DateTime? value) : this(value.HasValue ? discriminator: 0) { DateTime = value.GetValueOrDefault(); } + /// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, TimeSpan? value) : this(value.HasValue ? discriminator : 0) { TimeSpan = value.GetValueOrDefault(); } + /// Create a new discriminated union value + public DiscriminatedUnion128(int discriminator, Guid? value) : this(value.HasValue ? discriminator : 0) { Guid = value.GetValueOrDefault(); } + + /// Reset a value if the specified discriminator is assigned + public static void Reset(ref DiscriminatedUnion128 value, int discriminator) + { + if (value.Discriminator == discriminator) value = default; + } + /// The discriminator value + public int Discriminator => _discriminator; + } + + /// Represent multiple types as a union; this is used as part of OneOf - + /// note that it is the caller's responsbility to only read/write the value as the same type + [StructLayout(LayoutKind.Explicit)] + public readonly partial struct DiscriminatedUnion64Object + { +#if !FEAT_SAFE + unsafe static DiscriminatedUnion64Object() + { + if (sizeof(DateTime) > 8) throw new InvalidOperationException(nameof(DateTime) + " was unexpectedly too big for " + nameof(DiscriminatedUnion64Object)); + if (sizeof(TimeSpan) > 8) throw new InvalidOperationException(nameof(TimeSpan) + " was unexpectedly too big for " + nameof(DiscriminatedUnion64Object)); + } +#endif + [FieldOffset(0)] private readonly int _discriminator; // note that we can't pack further because Object needs x8 alignment/padding on x64 + + /// The value typed as Int64 + [FieldOffset(8)] public readonly long Int64; + /// The value typed as UInt64 + [FieldOffset(8)] public readonly ulong UInt64; + /// The value typed as Int32 + [FieldOffset(8)] public readonly int Int32; + /// The value typed as UInt32 + [FieldOffset(8)] public readonly uint UInt32; + /// The value typed as Boolean + [FieldOffset(8)] public readonly bool Boolean; + /// The value typed as Single + [FieldOffset(8)] public readonly float Single; + /// The value typed as Double + [FieldOffset(8)] public readonly double Double; + /// The value typed as DateTime + [FieldOffset(8)] public readonly DateTime DateTime; + /// The value typed as TimeSpan + [FieldOffset(8)] public readonly TimeSpan TimeSpan; + /// The value typed as Object + [FieldOffset(16)] public readonly object Object; + + private DiscriminatedUnion64Object(int discriminator) : this() + { + _discriminator = discriminator; + } + + /// Indicates whether the specified discriminator is assigned + public bool Is(int discriminator) => _discriminator == discriminator; + + /// Create a new discriminated union value + public 
DiscriminatedUnion64Object(int discriminator, long value) : this(discriminator) { Int64 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64Object(int discriminator, int value) : this(discriminator) { Int32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64Object(int discriminator, ulong value) : this(discriminator) { UInt64 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64Object(int discriminator, uint value) : this(discriminator) { UInt32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64Object(int discriminator, float value) : this(discriminator) { Single = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64Object(int discriminator, double value) : this(discriminator) { Double = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64Object(int discriminator, bool value) : this(discriminator) { Boolean = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64Object(int discriminator, object value) : this(value != null ? discriminator : 0) { Object = value; } + /// Create a new discriminated union value + public DiscriminatedUnion64Object(int discriminator, DateTime? value) : this(value.HasValue ? discriminator: 0) { DateTime = value.GetValueOrDefault(); } + /// Create a new discriminated union value + public DiscriminatedUnion64Object(int discriminator, TimeSpan? value) : this(value.HasValue ? discriminator : 0) { TimeSpan = value.GetValueOrDefault(); } + + /// Reset a value if the specified discriminator is assigned + public static void Reset(ref DiscriminatedUnion64Object value, int discriminator) + { + if (value.Discriminator == discriminator) value = default; + } + /// The discriminator value + public int Discriminator => _discriminator; + } + + /// Represent multiple types as a union; this is used as part of OneOf - + /// note that it is the caller's responsbility to only read/write the value as the same type + [StructLayout(LayoutKind.Explicit)] + public readonly partial struct DiscriminatedUnion32 + { + [FieldOffset(0)] private readonly int _discriminator; + + /// The value typed as Int32 + [FieldOffset(4)] public readonly int Int32; + /// The value typed as UInt32 + [FieldOffset(4)] public readonly uint UInt32; + /// The value typed as Boolean + [FieldOffset(4)] public readonly bool Boolean; + /// The value typed as Single + [FieldOffset(4)] public readonly float Single; + + private DiscriminatedUnion32(int discriminator) : this() + { + _discriminator = discriminator; + } + + /// Indicates whether the specified discriminator is assigned + public bool Is(int discriminator) => _discriminator == discriminator; + + /// Create a new discriminated union value + public DiscriminatedUnion32(int discriminator, int value) : this(discriminator) { Int32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion32(int discriminator, uint value) : this(discriminator) { UInt32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion32(int discriminator, float value) : this(discriminator) { Single = value; } + /// Create a new discriminated union value + public DiscriminatedUnion32(int discriminator, bool value) : this(discriminator) { Boolean = value; } + + /// Reset a value if the specified discriminator is assigned + public static void Reset(ref DiscriminatedUnion32 value, int discriminator) + { + if 
(value.Discriminator == discriminator) value = default; + } + /// The discriminator value + public int Discriminator => _discriminator; + } + + /// Represent multiple types as a union; this is used as part of OneOf - + /// note that it is the caller's responsbility to only read/write the value as the same type + [StructLayout(LayoutKind.Explicit)] + public readonly partial struct DiscriminatedUnion32Object + { + [FieldOffset(0)] private readonly int _discriminator; + + /// The value typed as Int32 + [FieldOffset(4)] public readonly int Int32; + /// The value typed as UInt32 + [FieldOffset(4)] public readonly uint UInt32; + /// The value typed as Boolean + [FieldOffset(4)] public readonly bool Boolean; + /// The value typed as Single + [FieldOffset(4)] public readonly float Single; + /// The value typed as Object + [FieldOffset(8)] public readonly object Object; + + private DiscriminatedUnion32Object(int discriminator) : this() + { + _discriminator = discriminator; + } + + /// Indicates whether the specified discriminator is assigned + public bool Is(int discriminator) => _discriminator == discriminator; + + /// Create a new discriminated union value + public DiscriminatedUnion32Object(int discriminator, int value) : this(discriminator) { Int32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion32Object(int discriminator, uint value) : this(discriminator) { UInt32 = value; } + /// Create a new discriminated union value + public DiscriminatedUnion32Object(int discriminator, float value) : this(discriminator) { Single = value; } + /// Create a new discriminated union value + public DiscriminatedUnion32Object(int discriminator, bool value) : this(discriminator) { Boolean = value; } + /// Create a new discriminated union value + public DiscriminatedUnion32Object(int discriminator, object value) : this(value != null ? discriminator : 0) { Object = value; } + + /// Reset a value if the specified discriminator is assigned + public static void Reset(ref DiscriminatedUnion32Object value, int discriminator) + { + if (value.Discriminator == discriminator) value = default; + } + /// The discriminator value + public int Discriminator => _discriminator; + } +} diff --git a/Runtime/Protobuf-net/DiscriminatedUnion.cs.meta b/Runtime/Protobuf-net/DiscriminatedUnion.cs.meta new file mode 100644 index 0000000..3268148 --- /dev/null +++ b/Runtime/Protobuf-net/DiscriminatedUnion.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: ab51817e163a1144bb8518368ba0a465 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Extensible.cs b/Runtime/Protobuf-net/Extensible.cs new file mode 100644 index 0000000..6bd528b --- /dev/null +++ b/Runtime/Protobuf-net/Extensible.cs @@ -0,0 +1,284 @@ +using System; +using System.Collections.Generic; +using ProtoBuf.Meta; +using System.Collections; + +namespace ProtoBuf +{ + /// + /// Simple base class for supporting unexpected fields allowing + /// for loss-less round-tips/merge, even if the data is not understod. + /// The additional fields are (by default) stored in-memory in a buffer. + /// + /// As an example of an alternative implementation, you might + /// choose to use the file system (temporary files) as the back-end, tracking + /// only the paths [such an object would ideally be IDisposable and use + /// a finalizer to ensure that the files are removed]. 
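// A minimal sketch of the alternative back-end mentioned in the remarks above: an
// IExtension implementation that buffers unexpected fields in a temporary file rather
// than in memory. Not part of this patch; the type name is hypothetical and it assumes
// `using System;`, `using System.IO;` and the IExtension contract declared in
// Runtime/Protobuf-net/IExtension.cs (BeginAppend/EndAppend/BeginQuery/EndQuery/GetLength).
public sealed class TempFileExtension : IExtension, IDisposable
{
    private readonly string _path = Path.GetTempFileName();

    // Unexpected fields are appended to the end of the backing file.
    Stream IExtension.BeginAppend()
        => new FileStream(_path, FileMode.Append, FileAccess.Write);

    void IExtension.EndAppend(Stream stream, bool commit)
    {
        stream.Dispose();
        // A production implementation would truncate the bytes written since
        // BeginAppend when commit is false; omitted here for brevity.
    }

    // Queries replay the whole file from the start.
    Stream IExtension.BeginQuery()
        => new FileStream(_path, FileMode.OpenOrCreate, FileAccess.Read);

    void IExtension.EndQuery(Stream stream) => stream.Dispose();

    int IExtension.GetLength() => (int)new FileInfo(_path).Length;

    // The remarks above suggest pairing this with a finalizer; only Dispose is shown here.
    public void Dispose() => File.Delete(_path);
}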
+ /// + public abstract class Extensible : IExtensible + { + // note: not marked ProtoContract - no local state, and can't + // predict sub-classes + + private IExtension extensionObject; + + IExtension IExtensible.GetExtensionObject(bool createIfMissing) + { + return GetExtensionObject(createIfMissing); + } + + /// + /// Retrieves the extension object for the current + /// instance, optionally creating it if it does not already exist. + /// + /// Should a new extension object be + /// created if it does not already exist? + /// The extension object if it exists (or was created), or null + /// if the extension object does not exist or is not available. + /// The createIfMissing argument is false during serialization, + /// and true during deserialization upon encountering unexpected fields. + protected virtual IExtension GetExtensionObject(bool createIfMissing) + { + return GetExtensionObject(ref extensionObject, createIfMissing); + } + + /// + /// Provides a simple, default implementation for extension support, + /// optionally creating it if it does not already exist. Designed to be called by + /// classes implementing . + /// + /// Should a new extension object be + /// created if it does not already exist? + /// The extension field to check (and possibly update). + /// The extension object if it exists (or was created), or null + /// if the extension object does not exist or is not available. + /// The createIfMissing argument is false during serialization, + /// and true during deserialization upon encountering unexpected fields. + public static IExtension GetExtensionObject(ref IExtension extensionObject, bool createIfMissing) + { + if (createIfMissing && extensionObject == null) + { + extensionObject = new BufferExtension(); + } + return extensionObject; + } + +#if !NO_RUNTIME + /// + /// Appends the value as an additional (unexpected) data-field for the instance. + /// Note that for non-repeated sub-objects, this equates to a merge operation; + /// for repeated sub-objects this adds a new instance to the set; for simple + /// values the new value supercedes the old value. + /// + /// Note that appending a value does not remove the old value from + /// the stream; avoid repeatedly appending values for the same field. + /// The type of the value to append. + /// The extensible object to append the value to. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The value to append. + public static void AppendValue(IExtensible instance, int tag, TValue value) + { + AppendValue(instance, tag, DataFormat.Default, value); + } + + /// + /// Appends the value as an additional (unexpected) data-field for the instance. + /// Note that for non-repeated sub-objects, this equates to a merge operation; + /// for repeated sub-objects this adds a new instance to the set; for simple + /// values the new value supercedes the old value. + /// + /// Note that appending a value does not remove the old value from + /// the stream; avoid repeatedly appending values for the same field. + /// The data-type of the field. + /// The data-format to use when encoding the value. + /// The extensible object to append the value to. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The value to append. 
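// Hedged sketch (not part of this file): a contract type that opts into extension support
// without deriving from Extensible, by delegating to the static helper documented above.
// The message type, member names and field number are hypothetical.
[ProtoContract]
public class SearchRequest : IExtensible
{
    [ProtoMember(1)]
    public string Query { get; set; }

    private IExtension _extensionData;

    IExtension IExtensible.GetExtensionObject(bool createIfMissing)
        => Extensible.GetExtensionObject(ref _extensionData, createIfMissing);
}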
+ public static void AppendValue(IExtensible instance, int tag, DataFormat format, TValue value) + { + ExtensibleUtil.AppendExtendValue(RuntimeTypeModel.Default, instance, tag, format, value); + } + /// + /// Queries an extensible object for an additional (unexpected) data-field for the instance. + /// The value returned is the composed value after merging any duplicated content; if the + /// value is "repeated" (a list), then use GetValues instead. + /// + /// The data-type of the field. + /// The extensible object to obtain the value from. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The effective value of the field, or the default value if not found. + public static TValue GetValue(IExtensible instance, int tag) + { + return GetValue(instance, tag, DataFormat.Default); + } + + /// + /// Queries an extensible object for an additional (unexpected) data-field for the instance. + /// The value returned is the composed value after merging any duplicated content; if the + /// value is "repeated" (a list), then use GetValues instead. + /// + /// The data-type of the field. + /// The extensible object to obtain the value from. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The data-format to use when decoding the value. + /// The effective value of the field, or the default value if not found. + public static TValue GetValue(IExtensible instance, int tag, DataFormat format) + { + TryGetValue(instance, tag, format, out TValue value); + return value; + } + + /// + /// Queries an extensible object for an additional (unexpected) data-field for the instance. + /// The value returned (in "value") is the composed value after merging any duplicated content; + /// if the value is "repeated" (a list), then use GetValues instead. + /// + /// The data-type of the field. + /// The effective value of the field, or the default value if not found. + /// The extensible object to obtain the value from. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// True if data for the field was present, false otherwise. + public static bool TryGetValue(IExtensible instance, int tag, out TValue value) + { + return TryGetValue(instance, tag, DataFormat.Default, out value); + } + + /// + /// Queries an extensible object for an additional (unexpected) data-field for the instance. + /// The value returned (in "value") is the composed value after merging any duplicated content; + /// if the value is "repeated" (a list), then use GetValues instead. + /// + /// The data-type of the field. + /// The effective value of the field, or the default value if not found. + /// The extensible object to obtain the value from. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The data-format to use when decoding the value. + /// True if data for the field was present, false otherwise. + public static bool TryGetValue(IExtensible instance, int tag, DataFormat format, out TValue value) + { + return TryGetValue(instance, tag, format, false, out value); + } + + /// + /// Queries an extensible object for an additional (unexpected) data-field for the instance. + /// The value returned (in "value") is the composed value after merging any duplicated content; + /// if the value is "repeated" (a list), then use GetValues instead. + /// + /// The data-type of the field. 
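// Hypothetical usage of the append/query helpers above, reusing the SearchRequest sketch
// from earlier; `stream` stands for any readable Stream containing a serialized message,
// and the tag numbers are invented for illustration.
var msg = Serializer.Deserialize<SearchRequest>(stream);   // unknown fields are buffered via IExtensible
Extensible.AppendValue<int>(msg, 100, 42);                  // attach an extra, undeclared field
if (Extensible.TryGetValue<int>(msg, 100, out var extra))
{
    Console.WriteLine(extra);                               // 42 (duplicates merge, last value wins)
}
foreach (var page in Extensible.GetValues<int>(msg, 200))   // repeated-style access, one yield per occurrence
{
    Console.WriteLine(page);
}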
+ /// The effective value of the field, or the default value if not found. + /// The extensible object to obtain the value from. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The data-format to use when decoding the value. + /// Allow tags that are present as part of the definition; for example, to query unknown enum values. + /// True if data for the field was present, false otherwise. + public static bool TryGetValue(IExtensible instance, int tag, DataFormat format, bool allowDefinedTag, out TValue value) + { + value = default; + bool set = false; + foreach (TValue val in ExtensibleUtil.GetExtendedValues(instance, tag, format, true, allowDefinedTag)) + { + // expecting at most one yield... + // but don't break; need to read entire stream + value = val; + set = true; + } + + return set; + } + + /// + /// Queries an extensible object for an additional (unexpected) data-field for the instance. + /// Each occurrence of the field is yielded separately, making this usage suitable for "repeated" + /// (list) fields. + /// + /// The extended data is processed lazily as the enumerator is iterated. + /// The data-type of the field. + /// The extensible object to obtain the value from. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// An enumerator that yields each occurrence of the field. + public static IEnumerable GetValues(IExtensible instance, int tag) + { + return ExtensibleUtil.GetExtendedValues(instance, tag, DataFormat.Default, false, false); + } + + /// + /// Queries an extensible object for an additional (unexpected) data-field for the instance. + /// Each occurrence of the field is yielded separately, making this usage suitable for "repeated" + /// (list) fields. + /// + /// The extended data is processed lazily as the enumerator is iterated. + /// The data-type of the field. + /// The extensible object to obtain the value from. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The data-format to use when decoding the value. + /// An enumerator that yields each occurrence of the field. + public static IEnumerable GetValues(IExtensible instance, int tag, DataFormat format) + { + return ExtensibleUtil.GetExtendedValues(instance, tag, format, false, false); + } +#endif + + /// + /// Queries an extensible object for an additional (unexpected) data-field for the instance. + /// The value returned (in "value") is the composed value after merging any duplicated content; + /// if the value is "repeated" (a list), then use GetValues instead. + /// + /// The data-type of the field. + /// The model to use for configuration. + /// The effective value of the field, or the default value if not found. + /// The extensible object to obtain the value from. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The data-format to use when decoding the value. + /// Allow tags that are present as part of the definition; for example, to query unknown enum values. + /// True if data for the field was present, false otherwise. + public static bool TryGetValue(TypeModel model, Type type, IExtensible instance, int tag, DataFormat format, bool allowDefinedTag, out object value) + { + value = null; + bool set = false; + foreach (object val in ExtensibleUtil.GetExtendedValues(model, type, instance, tag, format, true, allowDefinedTag)) + { + // expecting at most one yield... 
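// (for non-repeated fields, protobuf merge rules mean the last occurrence of the tag wins, so each iteration overwrites value)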
+ // but don't break; need to read entire stream + value = val; + set = true; + } + + return set; + } + + /// + /// Queries an extensible object for an additional (unexpected) data-field for the instance. + /// Each occurrence of the field is yielded separately, making this usage suitable for "repeated" + /// (list) fields. + /// + /// The extended data is processed lazily as the enumerator is iterated. + /// The model to use for configuration. + /// The data-type of the field. + /// The extensible object to obtain the value from. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The data-format to use when decoding the value. + /// An enumerator that yields each occurrence of the field. + public static IEnumerable GetValues(TypeModel model, Type type, IExtensible instance, int tag, DataFormat format) + { + return ExtensibleUtil.GetExtendedValues(model, type, instance, tag, format, false, false); + } + + /// + /// Appends the value as an additional (unexpected) data-field for the instance. + /// Note that for non-repeated sub-objects, this equates to a merge operation; + /// for repeated sub-objects this adds a new instance to the set; for simple + /// values the new value supercedes the old value. + /// + /// Note that appending a value does not remove the old value from + /// the stream; avoid repeatedly appending values for the same field. + /// The model to use for configuration. + /// The data-format to use when encoding the value. + /// The extensible object to append the value to. + /// The field identifier; the tag should not be defined as a known data-field for the instance. + /// The value to append. + public static void AppendValue(TypeModel model, IExtensible instance, int tag, DataFormat format, object value) + { + ExtensibleUtil.AppendExtendValue(model, instance, tag, format, value); + } + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/Extensible.cs.meta b/Runtime/Protobuf-net/Extensible.cs.meta new file mode 100644 index 0000000..ac4ec36 --- /dev/null +++ b/Runtime/Protobuf-net/Extensible.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: fc24b62dbd0b19642bce397e2b061aa0 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ExtensibleUtil.cs b/Runtime/Protobuf-net/ExtensibleUtil.cs new file mode 100644 index 0000000..9cc1613 --- /dev/null +++ b/Runtime/Protobuf-net/ExtensibleUtil.cs @@ -0,0 +1,118 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using ProtoBuf.Meta; + +namespace ProtoBuf +{ + /// + /// This class acts as an internal wrapper allowing us to do a dynamic + /// methodinfo invoke; an't put into Serializer as don't want on public + /// API; can't put into Serializer<T> since we need to invoke + /// across classes + /// + internal static class ExtensibleUtil + { + +#if !NO_RUNTIME + /// + /// All this does is call GetExtendedValuesTyped with the correct type for "instance"; + /// this ensures that we don't get issues with subclasses declaring conflicting types - + /// the caller must respect the fields defined for the type they pass in. 
+ /// + internal static IEnumerable GetExtendedValues(IExtensible instance, int tag, DataFormat format, bool singleton, bool allowDefinedTag) + { + foreach (TValue value in GetExtendedValues(RuntimeTypeModel.Default, typeof(TValue), instance, tag, format, singleton, allowDefinedTag)) + { + yield return value; + } + } +#endif + /// + /// All this does is call GetExtendedValuesTyped with the correct type for "instance"; + /// this ensures that we don't get issues with subclasses declaring conflicting types - + /// the caller must respect the fields defined for the type they pass in. + /// + internal static IEnumerable GetExtendedValues(TypeModel model, Type type, IExtensible instance, int tag, DataFormat format, bool singleton, bool allowDefinedTag) + { + if (instance == null) throw new ArgumentNullException(nameof(instance)); + if (tag <= 0) throw new ArgumentOutOfRangeException(nameof(tag)); + IExtension extn = instance.GetExtensionObject(false); + + if (extn == null) + { + yield break; + } + + Stream stream = extn.BeginQuery(); + object value = null; + ProtoReader reader = null; + try + { + SerializationContext ctx = new SerializationContext(); + reader = ProtoReader.Create(stream, model, ctx, ProtoReader.TO_EOF); + while (model.TryDeserializeAuxiliaryType(reader, format, tag, type, ref value, true, true, false, false, null) && value != null) + { + if (!singleton) + { + yield return value; + + value = null; // fresh item each time + } + } + if (singleton && value != null) + { + yield return value; + } + } + finally + { + ProtoReader.Recycle(reader); + extn.EndQuery(stream); + } + } + + internal static void AppendExtendValue(TypeModel model, IExtensible instance, int tag, DataFormat format, object value) + { + if (instance == null) throw new ArgumentNullException(nameof(instance)); + if (value == null) throw new ArgumentNullException(nameof(value)); + + // TODO + //model.CheckTagNotInUse(tag); + + // obtain the extension object and prepare to write + IExtension extn = instance.GetExtensionObject(true); + if (extn == null) throw new InvalidOperationException("No extension object available; appended data would be lost."); + bool commit = false; + Stream stream = extn.BeginAppend(); + try + { + using (ProtoWriter writer = ProtoWriter.Create(stream, model, null)) + { + model.TrySerializeAuxiliaryType(writer, null, format, tag, value, false, null); + writer.Close(); + } + commit = true; + } + finally + { + extn.EndAppend(stream, commit); + } + } + + // /// + // /// Stores the given value into the instance's stream; the serializer + // /// is inferred from TValue and format. 
+ // /// + // /// Needs to be public to be callable thru reflection in Silverlight + // public static void AppendExtendValueTyped( + // TypeModel model, TSource instance, int tag, DataFormat format, TValue value) + // where TSource : class, IExtensible + // { + // AppendExtendValue(model, instance, tag, format, value); + // } + + } + +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/ExtensibleUtil.cs.meta b/Runtime/Protobuf-net/ExtensibleUtil.cs.meta new file mode 100644 index 0000000..ea420c6 --- /dev/null +++ b/Runtime/Protobuf-net/ExtensibleUtil.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: dc71d3f5e8f25ad41bb04ea933cee56e +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/GlobalSuppressions.cs b/Runtime/Protobuf-net/GlobalSuppressions.cs new file mode 100644 index 0000000..48b9190 Binary files /dev/null and b/Runtime/Protobuf-net/GlobalSuppressions.cs differ diff --git a/Runtime/Protobuf-net/GlobalSuppressions.cs.meta b/Runtime/Protobuf-net/GlobalSuppressions.cs.meta new file mode 100644 index 0000000..14ab4e3 --- /dev/null +++ b/Runtime/Protobuf-net/GlobalSuppressions.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: c110f96e5d6da4f498bcb6d5fa673be7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Helpers.cs b/Runtime/Protobuf-net/Helpers.cs new file mode 100644 index 0000000..1a0491d --- /dev/null +++ b/Runtime/Protobuf-net/Helpers.cs @@ -0,0 +1,638 @@ + +using System; +using System.Collections; +using System.Collections.Generic; +using System.IO; +using System.Text; +#if COREFX +using System.Linq; +#endif +#if PROFILE259 +using System.Reflection; +using System.Linq; +#else +using System.Reflection; +#endif + +namespace ProtoBuf +{ + /// + /// Not all frameworks are created equal (fx1.1 vs fx2.0, + /// micro-framework, compact-framework, + /// silverlight, etc). This class simply wraps up a few things that would + /// otherwise make the real code unnecessarily messy, providing fallback + /// implementations if necessary. + /// + internal sealed class Helpers + { + private Helpers() { } + + public static StringBuilder AppendLine(StringBuilder builder) + { + return builder.AppendLine(); + } + + [System.Diagnostics.Conditional("DEBUG")] + public static void DebugWriteLine(string message, object obj) + { +#if DEBUG + string suffix; + try + { + suffix = obj == null ? 
"(null)" : obj.ToString(); + } + catch + { + suffix = "(exception)"; + } + DebugWriteLine(message + ": " + suffix); +#endif + } + [System.Diagnostics.Conditional("DEBUG")] + public static void DebugWriteLine(string message) + { +#if DEBUG + System.Diagnostics.Debug.WriteLine(message); +#endif + } + [System.Diagnostics.Conditional("TRACE")] + public static void TraceWriteLine(string message) + { +#if TRACE +#if CF2 || PORTABLE || COREFX || PROFILE259 + System.Diagnostics.Debug.WriteLine(message); +#else + System.Diagnostics.Trace.WriteLine(message); +#endif +#endif + } + + [System.Diagnostics.Conditional("DEBUG")] + public static void DebugAssert(bool condition, string message) + { +#if DEBUG + if (!condition) + { + System.Diagnostics.Debug.Assert(false, message); + } +#endif + } + [System.Diagnostics.Conditional("DEBUG")] + public static void DebugAssert(bool condition, string message, params object[] args) + { +#if DEBUG + if (!condition) DebugAssert(false, string.Format(message, args)); +#endif + } + [System.Diagnostics.Conditional("DEBUG")] + public static void DebugAssert(bool condition) + { +#if DEBUG + if (!condition && System.Diagnostics.Debugger.IsAttached) System.Diagnostics.Debugger.Break(); + System.Diagnostics.Debug.Assert(condition); +#endif + } +#if !NO_RUNTIME + public static void Sort(int[] keys, object[] values) + { + // bubble-sort; it'll work on MF, has small code, + // and works well-enough for our sizes. This approach + // also allows us to do `int` compares without having + // to go via IComparable etc, so win:win + bool swapped; + do + { + swapped = false; + for (int i = 1; i < keys.Length; i++) + { + if (keys[i - 1] > keys[i]) + { + int tmpKey = keys[i]; + keys[i] = keys[i - 1]; + keys[i - 1] = tmpKey; + object tmpValue = values[i]; + values[i] = values[i - 1]; + values[i - 1] = tmpValue; + swapped = true; + } + } + } while (swapped); + } +#endif + +#if COREFX + internal static MemberInfo GetInstanceMember(TypeInfo declaringType, string name) + { + var members = declaringType.AsType().GetMember(name, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + switch(members.Length) + { + case 0: return null; + case 1: return members[0]; + default: throw new AmbiguousMatchException(name); + } + } + internal static MethodInfo GetInstanceMethod(Type declaringType, string name) + { + foreach (MethodInfo method in declaringType.GetMethods(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)) + { + if (method.Name == name) return method; + } + return null; + } + internal static MethodInfo GetInstanceMethod(TypeInfo declaringType, string name) + { + return GetInstanceMethod(declaringType.AsType(), name); ; + } + internal static MethodInfo GetStaticMethod(Type declaringType, string name) + { + foreach (MethodInfo method in declaringType.GetMethods(BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic)) + { + if (method.Name == name) return method; + } + return null; + } + + internal static MethodInfo GetStaticMethod(TypeInfo declaringType, string name) + { + return GetStaticMethod(declaringType.AsType(), name); + } + internal static MethodInfo GetStaticMethod(Type declaringType, string name, Type[] parameterTypes) + { + foreach(MethodInfo method in declaringType.GetMethods(BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic)) + { + if (method.Name == name && IsMatch(method.GetParameters(), parameterTypes)) return method; + } + return null; + } + internal static MethodInfo GetInstanceMethod(Type declaringType, string 
name, Type[] parameterTypes) + { + foreach (MethodInfo method in declaringType.GetMethods(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)) + { + if (method.Name == name && IsMatch(method.GetParameters(), parameterTypes)) return method; + } + return null; + } + internal static MethodInfo GetInstanceMethod(TypeInfo declaringType, string name, Type[] types) + { + return GetInstanceMethod(declaringType.AsType(), name, types); + } +#elif PROFILE259 + internal static MemberInfo GetInstanceMember(TypeInfo declaringType, string name) + { + IEnumerable members = declaringType.DeclaredMembers; + IList found = new List(); + foreach (MemberInfo member in members) + { + if (member.Name.Equals(name)) + { + found.Add(member); + } + } + switch (found.Count) + { + case 0: return null; + case 1: return found.First(); + default: throw new AmbiguousMatchException(name); + } + } + internal static MethodInfo GetInstanceMethod(Type declaringType, string name) + { + var methods = declaringType.GetRuntimeMethods(); + foreach (MethodInfo method in methods) + { + if (method.Name == name) + { + return method; + } + } + return null; + } + internal static MethodInfo GetInstanceMethod(TypeInfo declaringType, string name) + { + return GetInstanceMethod(declaringType.AsType(), name); ; + } + internal static MethodInfo GetStaticMethod(Type declaringType, string name) + { + var methods = declaringType.GetRuntimeMethods(); + foreach (MethodInfo method in methods) + { + if (method.Name == name) + { + return method; + } + } + return null; + } + + internal static MethodInfo GetStaticMethod(TypeInfo declaringType, string name) + { + return GetStaticMethod(declaringType.AsType(), name); + } + internal static MethodInfo GetStaticMethod(Type declaringType, string name, Type[] parameterTypes) + { + var methods = declaringType.GetRuntimeMethods(); + foreach (MethodInfo method in methods) + { + if (method.Name == name && + IsMatch(method.GetParameters(), parameterTypes)) + { + return method; + } + } + return null; + } + internal static MethodInfo GetInstanceMethod(Type declaringType, string name, Type[] parameterTypes) + { + var methods = declaringType.GetRuntimeMethods(); + foreach (MethodInfo method in methods) + { + if (method.Name == name && + IsMatch(method.GetParameters(), parameterTypes)) + { + return method; + } + } + return null; + } + internal static MethodInfo GetInstanceMethod(TypeInfo declaringType, string name, Type[] types) + { + return GetInstanceMethod(declaringType.AsType(), name, types); + } +#else + internal static MethodInfo GetInstanceMethod(Type declaringType, string name) + { + return declaringType.GetMethod(name, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + } + internal static MethodInfo GetStaticMethod(Type declaringType, string name) + { + return declaringType.GetMethod(name, BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic); + } + internal static MethodInfo GetStaticMethod(Type declaringType, string name, Type[] parameterTypes) + { +#if PORTABLE + foreach (MethodInfo method in declaringType.GetMethods(BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic)) + { + if (method.Name == name && IsMatch(method.GetParameters(), parameterTypes)) return method; + } + return null; +#else + return declaringType.GetMethod(name, BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic, null, parameterTypes, null); +#endif + } + internal static MethodInfo GetInstanceMethod(Type declaringType, string name, Type[] types) + { + if 
(types == null) types = EmptyTypes; +#if PORTABLE || COREFX + MethodInfo method = declaringType.GetMethod(name, types); + if (method != null && method.IsStatic) method = null; + return method; +#else + return declaringType.GetMethod(name, BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic, + null, types, null); +#endif + } +#endif + + internal static bool IsSubclassOf(Type type, Type baseClass) + { +#if COREFX || PROFILE259 + return type.GetTypeInfo().IsSubclassOf(baseClass); +#else + return type.IsSubclassOf(baseClass); +#endif + } + + public readonly static Type[] EmptyTypes = +#if PORTABLE || CF2 || CF35 || PROFILE259 + new Type[0]; +#else + Type.EmptyTypes; +#endif + +#if COREFX || PROFILE259 + private static readonly Type[] knownTypes = new Type[] { + typeof(bool), typeof(char), typeof(sbyte), typeof(byte), + typeof(short), typeof(ushort), typeof(int), typeof(uint), + typeof(long), typeof(ulong), typeof(float), typeof(double), + typeof(decimal), typeof(string), + typeof(DateTime), typeof(TimeSpan), typeof(Guid), typeof(Uri), + typeof(byte[]), typeof(Type)}; + private static readonly ProtoTypeCode[] knownCodes = new ProtoTypeCode[] { + ProtoTypeCode.Boolean, ProtoTypeCode.Char, ProtoTypeCode.SByte, ProtoTypeCode.Byte, + ProtoTypeCode.Int16, ProtoTypeCode.UInt16, ProtoTypeCode.Int32, ProtoTypeCode.UInt32, + ProtoTypeCode.Int64, ProtoTypeCode.UInt64, ProtoTypeCode.Single, ProtoTypeCode.Double, + ProtoTypeCode.Decimal, ProtoTypeCode.String, + ProtoTypeCode.DateTime, ProtoTypeCode.TimeSpan, ProtoTypeCode.Guid, ProtoTypeCode.Uri, + ProtoTypeCode.ByteArray, ProtoTypeCode.Type + }; + +#endif + + public static ProtoTypeCode GetTypeCode(Type type) + { +#if COREFX || PROFILE259 + if (IsEnum(type)) + { + type = Enum.GetUnderlyingType(type); + } + int idx = Array.IndexOf(knownTypes, type); + if (idx >= 0) return knownCodes[idx]; + return type == null ? 
ProtoTypeCode.Empty : ProtoTypeCode.Unknown; +#else + TypeCode code = Type.GetTypeCode(type); + switch (code) + { + case TypeCode.Empty: + case TypeCode.Boolean: + case TypeCode.Char: + case TypeCode.SByte: + case TypeCode.Byte: + case TypeCode.Int16: + case TypeCode.UInt16: + case TypeCode.Int32: + case TypeCode.UInt32: + case TypeCode.Int64: + case TypeCode.UInt64: + case TypeCode.Single: + case TypeCode.Double: + case TypeCode.Decimal: + case TypeCode.DateTime: + case TypeCode.String: + return (ProtoTypeCode)code; + } + if (type == typeof(TimeSpan)) return ProtoTypeCode.TimeSpan; + if (type == typeof(Guid)) return ProtoTypeCode.Guid; + if (type == typeof(Uri)) return ProtoTypeCode.Uri; +#if PORTABLE + // In PCLs, the Uri type may not match (WinRT uses Internal/Uri, .Net uses System/Uri), so match on the full name instead + if (type.FullName == typeof(Uri).FullName) return ProtoTypeCode.Uri; +#endif + if (type == typeof(byte[])) return ProtoTypeCode.ByteArray; + if (type == typeof(Type)) return ProtoTypeCode.Type; + + return ProtoTypeCode.Unknown; +#endif + } + + internal static Type GetUnderlyingType(Type type) + { + return Nullable.GetUnderlyingType(type); + } + + internal static bool IsValueType(Type type) + { +#if COREFX || PROFILE259 + return type.GetTypeInfo().IsValueType; +#else + return type.IsValueType; +#endif + } + internal static bool IsSealed(Type type) + { +#if COREFX || PROFILE259 + return type.GetTypeInfo().IsSealed; +#else + return type.IsSealed; +#endif + } + internal static bool IsClass(Type type) + { +#if COREFX || PROFILE259 + return type.GetTypeInfo().IsClass; +#else + return type.IsClass; +#endif + } + + internal static bool IsEnum(Type type) + { +#if COREFX || PROFILE259 + return type.GetTypeInfo().IsEnum; +#else + return type.IsEnum; +#endif + } + + internal static MethodInfo GetGetMethod(PropertyInfo property, bool nonPublic, bool allowInternal) + { + if (property == null) return null; +#if COREFX || PROFILE259 + MethodInfo method = property.GetMethod; + if (!nonPublic && method != null && !method.IsPublic) method = null; + return method; +#else + MethodInfo method = property.GetGetMethod(nonPublic); + if (method == null && !nonPublic && allowInternal) + { // could be "internal" or "protected internal"; look for a non-public, then back-check + method = property.GetGetMethod(true); + if (method == null && !(method.IsAssembly || method.IsFamilyOrAssembly)) + { + method = null; + } + } + return method; +#endif + } + internal static MethodInfo GetSetMethod(PropertyInfo property, bool nonPublic, bool allowInternal) + { + if (property == null) return null; +#if COREFX || PROFILE259 + MethodInfo method = property.SetMethod; + if (!nonPublic && method != null && !method.IsPublic) method = null; + return method; +#else + MethodInfo method = property.GetSetMethod(nonPublic); + if (method == null && !nonPublic && allowInternal) + { // could be "internal" or "protected internal"; look for a non-public, then back-check + method = property.GetGetMethod(true); + if (method == null && !(method.IsAssembly || method.IsFamilyOrAssembly)) + { + method = null; + } + } + return method; +#endif + } + +#if COREFX || PORTABLE || PROFILE259 + private static bool IsMatch(ParameterInfo[] parameters, Type[] parameterTypes) + { + if (parameterTypes == null) parameterTypes = EmptyTypes; + if (parameters.Length != parameterTypes.Length) return false; + for (int i = 0; i < parameters.Length; i++) + { + if (parameters[i].ParameterType != parameterTypes[i]) return false; + } + return true; + } 
+#endif +#if COREFX || PROFILE259 + internal static ConstructorInfo GetConstructor(Type type, Type[] parameterTypes, bool nonPublic) + { + return GetConstructor(type.GetTypeInfo(), parameterTypes, nonPublic); + } + internal static ConstructorInfo GetConstructor(TypeInfo type, Type[] parameterTypes, bool nonPublic) + { + return GetConstructors(type, nonPublic).SingleOrDefault(ctor => IsMatch(ctor.GetParameters(), parameterTypes)); + } + internal static ConstructorInfo[] GetConstructors(TypeInfo typeInfo, bool nonPublic) + { + return typeInfo.DeclaredConstructors.Where(c => !c.IsStatic && ((!nonPublic && c.IsPublic) || nonPublic)).ToArray(); + } + internal static PropertyInfo GetProperty(Type type, string name, bool nonPublic) + { + return GetProperty(type.GetTypeInfo(), name, nonPublic); + } + internal static PropertyInfo GetProperty(TypeInfo type, string name, bool nonPublic) + { + return type.GetDeclaredProperty(name); + } +#else + + internal static ConstructorInfo GetConstructor(Type type, Type[] parameterTypes, bool nonPublic) + { +#if PORTABLE || COREFX + // pretty sure this will only ever return public, but... + ConstructorInfo ctor = type.GetConstructor(parameterTypes); + return (ctor != null && (nonPublic || ctor.IsPublic)) ? ctor : null; +#else + return type.GetConstructor( + nonPublic ? BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic + : BindingFlags.Instance | BindingFlags.Public, + null, parameterTypes, null); +#endif + + } + internal static ConstructorInfo[] GetConstructors(Type type, bool nonPublic) + { + return type.GetConstructors( + nonPublic ? BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic + : BindingFlags.Instance | BindingFlags.Public); + } + internal static PropertyInfo GetProperty(Type type, string name, bool nonPublic) + { + return type.GetProperty(name, + nonPublic ? BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic + : BindingFlags.Instance | BindingFlags.Public); + } +#endif + + + internal static object ParseEnum(Type type, string value) + { + return Enum.Parse(type, value, true); + } + + + internal static MemberInfo[] GetInstanceFieldsAndProperties(Type type, bool publicOnly) + { +#if PROFILE259 + var members = new List(); + foreach (FieldInfo field in type.GetRuntimeFields()) + { + if (field.IsStatic) continue; + if (field.IsPublic || !publicOnly) members.Add(field); + } + foreach (PropertyInfo prop in type.GetRuntimeProperties()) + { + MethodInfo getter = Helpers.GetGetMethod(prop, true, true); + if (getter == null || getter.IsStatic) continue; + if (getter.IsPublic || !publicOnly) members.Add(prop); + } + return members.ToArray(); +#else + BindingFlags flags = publicOnly ? 
BindingFlags.Public | BindingFlags.Instance : BindingFlags.Public | BindingFlags.Instance | BindingFlags.NonPublic; + PropertyInfo[] props = type.GetProperties(flags); + FieldInfo[] fields = type.GetFields(flags); + MemberInfo[] members = new MemberInfo[fields.Length + props.Length]; + props.CopyTo(members, 0); + fields.CopyTo(members, props.Length); + return members; +#endif + } + + internal static Type GetMemberType(MemberInfo member) + { +#if PORTABLE || COREFX || PROFILE259 + if (member is PropertyInfo prop) return prop.PropertyType; + FieldInfo fld = member as FieldInfo; + return fld?.FieldType; +#else + switch (member.MemberType) + { + case MemberTypes.Field: return ((FieldInfo)member).FieldType; + case MemberTypes.Property: return ((PropertyInfo)member).PropertyType; + default: return null; + } +#endif + } + + internal static bool IsAssignableFrom(Type target, Type type) + { +#if PROFILE259 + return target.GetTypeInfo().IsAssignableFrom(type.GetTypeInfo()); +#else + return target.IsAssignableFrom(type); +#endif + } + internal static Assembly GetAssembly(Type type) + { +#if COREFX || PROFILE259 + return type.GetTypeInfo().Assembly; +#else + return type.Assembly; +#endif + } + internal static byte[] GetBuffer(MemoryStream ms) + { +#if COREFX + if(!ms.TryGetBuffer(out var segment)) + { + throw new InvalidOperationException("Unable to obtain underlying MemoryStream buffer"); + } else if(segment.Offset != 0) + { + throw new InvalidOperationException("Underlying MemoryStream buffer was not zero-offset"); + } else + { + return segment.Array; + } +#elif PORTABLE || PROFILE259 + return ms.ToArray(); +#else + return ms.GetBuffer(); +#endif + } + } + /// + /// Intended to be a direct map to regular TypeCode, but: + /// - with missing types + /// - existing on WinRT + /// + internal enum ProtoTypeCode + { + Empty = 0, + Unknown = 1, // maps to TypeCode.Object + Boolean = 3, + Char = 4, + SByte = 5, + Byte = 6, + Int16 = 7, + UInt16 = 8, + Int32 = 9, + UInt32 = 10, + Int64 = 11, + UInt64 = 12, + Single = 13, + Double = 14, + Decimal = 15, + DateTime = 16, + String = 18, + + // additions + TimeSpan = 100, + ByteArray = 101, + Guid = 102, + Uri = 103, + Type = 104 + } +} diff --git a/Runtime/Protobuf-net/Helpers.cs.meta b/Runtime/Protobuf-net/Helpers.cs.meta new file mode 100644 index 0000000..d67edef --- /dev/null +++ b/Runtime/Protobuf-net/Helpers.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 227f762ea287cdf42a9293ea6c481ff8 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/IExtensible.cs b/Runtime/Protobuf-net/IExtensible.cs new file mode 100644 index 0000000..b7c0b57 --- /dev/null +++ b/Runtime/Protobuf-net/IExtensible.cs @@ -0,0 +1,23 @@ + +namespace ProtoBuf +{ + /// + /// Indicates that the implementing type has support for protocol-buffer + /// extensions. + /// + /// Can be implemented by deriving from Extensible. + public interface IExtensible + { + /// + /// Retrieves the extension object for the current + /// instance, optionally creating it if it does not already exist. + /// + /// Should a new extension object be + /// created if it does not already exist? + /// The extension object if it exists (or was created), or null + /// if the extension object does not exist or is not available. 
+ /// The createIfMissing argument is false during serialization, + /// and true during deserialization upon encountering unexpected fields. + IExtension GetExtensionObject(bool createIfMissing); + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/IExtensible.cs.meta b/Runtime/Protobuf-net/IExtensible.cs.meta new file mode 100644 index 0000000..3c8f29a --- /dev/null +++ b/Runtime/Protobuf-net/IExtensible.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b9cd5092c5d6d9d4299fc0c88ebb9390 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/IExtension.cs b/Runtime/Protobuf-net/IExtension.cs new file mode 100644 index 0000000..0a137ac --- /dev/null +++ b/Runtime/Protobuf-net/IExtension.cs @@ -0,0 +1,58 @@ + +using System.IO; +namespace ProtoBuf +{ + /// + /// Provides addition capability for supporting unexpected fields during + /// protocol-buffer serialization/deserialization. This allows for loss-less + /// round-trip/merge, even when the data is not fully understood. + /// + public interface IExtension + { + /// + /// Requests a stream into which any unexpected fields can be persisted. + /// + /// A new stream suitable for storing data. + Stream BeginAppend(); + + /// + /// Indicates that all unexpected fields have now been stored. The + /// implementing class is responsible for closing the stream. If + /// "commit" is not true the data may be discarded. + /// + /// The stream originally obtained by BeginAppend. + /// True if the append operation completed successfully. + void EndAppend(Stream stream, bool commit); + + /// + /// Requests a stream of the unexpected fields previously stored. + /// + /// A prepared stream of the unexpected fields. + Stream BeginQuery(); + + /// + /// Indicates that all unexpected fields have now been read. The + /// implementing class is responsible for closing the stream. + /// + /// The stream originally obtained by BeginQuery. + void EndQuery(Stream stream); + + /// + /// Requests the length of the raw binary stream; this is used + /// when serializing sub-entities to indicate the expected size. + /// + /// The length of the binary stream representing unexpected data. 
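+ /// For illustration only: a minimal implementation could keep the unknown-field bytes in a
+ /// plain byte array. This sketch assumes the usual System and System.IO usings, and the
+ /// class name BufferExtension is purely hypothetical, not part of the shipped library:
+ ///   class BufferExtension : IExtension
+ ///   {
+ ///       private byte[] data = new byte[0];                        // raw unknown-field bytes
+ ///       Stream IExtension.BeginAppend() => new MemoryStream();    // scratch stream for new fields
+ ///       void IExtension.EndAppend(Stream stream, bool commit)
+ ///       {
+ ///           using (var ms = (MemoryStream)stream)                 // caller hands back the scratch stream
+ ///           {
+ ///               if (!commit || ms.Length == 0) return;            // discard when not committed
+ ///               var appended = ms.ToArray();
+ ///               var merged = new byte[data.Length + appended.Length];
+ ///               Buffer.BlockCopy(data, 0, merged, 0, data.Length);
+ ///               Buffer.BlockCopy(appended, 0, merged, data.Length, appended.Length);
+ ///               data = merged;                                    // keep the accumulated bytes
+ ///           }
+ ///       }
+ ///       Stream IExtension.BeginQuery() => new MemoryStream(data, false);  // replay stored bytes
+ ///       void IExtension.EndQuery(Stream stream) => stream.Dispose();
+ ///       int IExtension.GetLength() => data.Length;                // expected size when nesting
+ ///   }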
+ int GetLength(); + } + + /// + /// Provides the ability to remove all existing extension data + /// + public interface IExtensionResettable : IExtension + { + /// + /// Remove all existing extension data + /// + void Reset(); + } +} diff --git a/Runtime/Protobuf-net/IExtension.cs.meta b/Runtime/Protobuf-net/IExtension.cs.meta new file mode 100644 index 0000000..d5da340 --- /dev/null +++ b/Runtime/Protobuf-net/IExtension.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8018fb363175787478148842225e7d16 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/IProtoInputT.cs b/Runtime/Protobuf-net/IProtoInputT.cs new file mode 100644 index 0000000..6eaa0ce --- /dev/null +++ b/Runtime/Protobuf-net/IProtoInputT.cs @@ -0,0 +1,13 @@ +namespace ProtoBuf +{ + /// + /// Represents the ability to deserialize values from an input of type + /// + public interface IProtoInput + { + /// + /// Deserialize a value from the input + /// + T Deserialize(TInput source, T value = default, object userState = null); + } +} diff --git a/Runtime/Protobuf-net/IProtoInputT.cs.meta b/Runtime/Protobuf-net/IProtoInputT.cs.meta new file mode 100644 index 0000000..a80bc62 --- /dev/null +++ b/Runtime/Protobuf-net/IProtoInputT.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a6514bacfd3143a49a027f15434586f7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/IProtoOutputT.cs b/Runtime/Protobuf-net/IProtoOutputT.cs new file mode 100644 index 0000000..1c7dd42 --- /dev/null +++ b/Runtime/Protobuf-net/IProtoOutputT.cs @@ -0,0 +1,55 @@ +using System; + +namespace ProtoBuf +{ + /// + /// Represents the ability to serialize values to an output of type + /// + public interface IProtoOutput + { + /// + /// Serialize the provided value + /// + void Serialize(TOutput destination, T value, object userState = null); + } + + /// + /// Represents the ability to serialize values to an output of type + /// with pre-computation of the length + /// + public interface IMeasuredProtoOutput : IProtoOutput + { + /// + /// Measure the length of a value in advance of serialization + /// + MeasureState Measure(T value, object userState = null); + + /// + /// Serialize the previously measured value + /// + void Serialize(MeasureState measured, TOutput destination); + } + + /// + /// Represents the outcome of computing the length of an object; since this may have required computing lengths + /// for multiple objects, some metadata is retained so that a subsequent serialize operation using + /// this instance can re-use the previously calculated lengths. If the object state changes between the + /// measure and serialize operations, the behavior is undefined. + /// + public struct MeasureState : IDisposable + // note: 2.4.* does not actually implement this API; + // it only advertises it for 3.* capability/feature-testing, i.e. 
+ // callers can check whether a model implements + // IMeasuredProtoOutput, and *work from that* + { + /// + /// Releases all resources associated with this value + /// + public void Dispose() => throw new NotImplementedException(); + + /// + /// Gets the calculated length of this serialize operation, in bytes + /// + public long Length => throw new NotImplementedException(); + } +} diff --git a/Runtime/Protobuf-net/IProtoOutputT.cs.meta b/Runtime/Protobuf-net/IProtoOutputT.cs.meta new file mode 100644 index 0000000..a6e7d86 --- /dev/null +++ b/Runtime/Protobuf-net/IProtoOutputT.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 17c52d90924d69d4aaf31925ea2c90bf +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ImplicitFields.cs b/Runtime/Protobuf-net/ImplicitFields.cs new file mode 100644 index 0000000..211abdd --- /dev/null +++ b/Runtime/Protobuf-net/ImplicitFields.cs @@ -0,0 +1,29 @@ +namespace ProtoBuf +{ + /// + /// Specifies the method used to infer field tags for members of the type + /// under consideration. Tags are deduced using the invariant alphabetic + /// sequence of the members' names; this makes implicit field tags very brittle, + /// and susceptible to changes such as field names (normally an isolated + /// change). + /// + public enum ImplicitFields + { + /// + /// No members are serialized implicitly; all members require a suitable + /// attribute such as [ProtoMember]. This is the recmomended mode for + /// most scenarios. + /// + None = 0, + /// + /// Public properties and fields are eligible for implicit serialization; + /// this treats the public API as a contract. Ordering beings from ImplicitFirstTag. + /// + AllPublic = 1, + /// + /// Public and non-public fields are eligible for implicit serialization; + /// this acts as a state/implementation serializer. Ordering beings from ImplicitFirstTag. + /// + AllFields = 2 + } +} diff --git a/Runtime/Protobuf-net/ImplicitFields.cs.meta b/Runtime/Protobuf-net/ImplicitFields.cs.meta new file mode 100644 index 0000000..6da3bef --- /dev/null +++ b/Runtime/Protobuf-net/ImplicitFields.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b838f9e3c6536bc438e7c31f73c49160 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/KeyValuePairProxy.cs b/Runtime/Protobuf-net/KeyValuePairProxy.cs new file mode 100644 index 0000000..0da5761 --- /dev/null +++ b/Runtime/Protobuf-net/KeyValuePairProxy.cs @@ -0,0 +1,44 @@ +//using System.Collections.Generic; + +//namespace ProtoBuf +//{ +// /// +// /// Mutable version of the common key/value pair struct; used during serialization. This type is intended for internal use only and should not +// /// be used by calling code; it is required to be public for implementation reasons. +// /// +// [ProtoContract] +// public struct KeyValuePairSurrogate +// { +// private TKey key; +// private TValue value; +// /// +// /// The key of the pair. +// /// +// [ProtoMember(1, IsRequired = true)] +// public TKey Key { get { return key; } set { key = value; } } +// /// +// /// The value of the pair. 
+// /// +// [ProtoMember(2)] +// public TValue Value{ get { return value; } set { this.value = value; } } +// private KeyValuePairSurrogate(TKey key, TValue value) +// { +// this.key = key; +// this.value = value; +// } +// /// +// /// Convert a surrogate instance to a standard pair instance. +// /// +// public static implicit operator KeyValuePair (KeyValuePairSurrogate value) +// { +// return new KeyValuePair(value.key, value.value); +// } +// /// +// /// Convert a standard pair instance to a surrogate instance. +// /// +// public static implicit operator KeyValuePairSurrogate(KeyValuePair value) +// { +// return new KeyValuePairSurrogate(value.Key, value.Value); +// } +// } +//} \ No newline at end of file diff --git a/Runtime/Protobuf-net/KeyValuePairProxy.cs.meta b/Runtime/Protobuf-net/KeyValuePairProxy.cs.meta new file mode 100644 index 0000000..c74b284 --- /dev/null +++ b/Runtime/Protobuf-net/KeyValuePairProxy.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b6221476e2339494cb5ee2bdc10ffd81 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta.meta b/Runtime/Protobuf-net/Meta.meta new file mode 100644 index 0000000..5f17bdd --- /dev/null +++ b/Runtime/Protobuf-net/Meta.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: a70a85c13dddce74d9a6395c440c9156 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/AttributeMap.cs b/Runtime/Protobuf-net/Meta/AttributeMap.cs new file mode 100644 index 0000000..5bab942 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/AttributeMap.cs @@ -0,0 +1,108 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; + +namespace ProtoBuf.Meta +{ + internal abstract class AttributeMap + { +#if DEBUG + [Obsolete("Please use AttributeType instead")] + new public Type GetType() => AttributeType; +#endif + public override string ToString() => AttributeType?.FullName ?? 
""; + public abstract bool TryGet(string key, bool publicOnly, out object value); + public bool TryGet(string key, out object value) + { + return TryGet(key, true, out value); + } + public abstract Type AttributeType { get; } + public static AttributeMap[] Create(TypeModel model, Type type, bool inherit) + { + +#if COREFX || PROFILE259 + Attribute[] all = System.Linq.Enumerable.ToArray(System.Linq.Enumerable.OfType(type.GetTypeInfo().GetCustomAttributes(inherit))); +#else + object[] all = type.GetCustomAttributes(inherit); +#endif + AttributeMap[] result = new AttributeMap[all.Length]; + for(int i = 0 ; i < all.Length ; i++) + { + result[i] = new ReflectionAttributeMap((Attribute)all[i]); + } + return result; + } + + public static AttributeMap[] Create(TypeModel model, MemberInfo member, bool inherit) + { + +#if COREFX || PROFILE259 + Attribute[] all = System.Linq.Enumerable.ToArray(System.Linq.Enumerable.OfType(member.GetCustomAttributes(inherit))); +#else + object[] all = member.GetCustomAttributes(inherit); +#endif + AttributeMap[] result = new AttributeMap[all.Length]; + for(int i = 0 ; i < all.Length ; i++) + { + result[i] = new ReflectionAttributeMap((Attribute)all[i]); + } + return result; + } + public static AttributeMap[] Create(TypeModel model, Assembly assembly) + { +#if COREFX || PROFILE259 + Attribute[] all = System.Linq.Enumerable.ToArray(assembly.GetCustomAttributes()); +#else + const bool inherit = false; + object[] all = assembly.GetCustomAttributes(inherit); +#endif + AttributeMap[] result = new AttributeMap[all.Length]; + for(int i = 0 ; i < all.Length ; i++) + { + result[i] = new ReflectionAttributeMap((Attribute)all[i]); + } + return result; + + } + + public abstract object Target { get; } + + private sealed class ReflectionAttributeMap : AttributeMap + { + private readonly Attribute attribute; + + public ReflectionAttributeMap(Attribute attribute) + { + this.attribute = attribute; + } + + public override object Target => attribute; + + public override Type AttributeType => attribute.GetType(); + + public override bool TryGet(string key, bool publicOnly, out object value) + { + MemberInfo[] members = Helpers.GetInstanceFieldsAndProperties(attribute.GetType(), publicOnly); + foreach (MemberInfo member in members) + { + if (string.Equals(member.Name, key, StringComparison.OrdinalIgnoreCase)) + { + if (member is PropertyInfo prop) { + value = prop.GetValue(attribute, null); + return true; + } + if (member is FieldInfo field) { + value = field.GetValue(attribute); + return true; + } + + throw new NotSupportedException(member.GetType().Name); + } + } + value = null; + return false; + } + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Meta/AttributeMap.cs.meta b/Runtime/Protobuf-net/Meta/AttributeMap.cs.meta new file mode 100644 index 0000000..d92ef3a --- /dev/null +++ b/Runtime/Protobuf-net/Meta/AttributeMap.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a3e64de7ef1358447843db562f78060f +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/BasicList.cs b/Runtime/Protobuf-net/Meta/BasicList.cs new file mode 100644 index 0000000..d1308f3 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/BasicList.cs @@ -0,0 +1,267 @@ +using System; +using System.Collections; + +namespace ProtoBuf.Meta +{ + internal sealed class MutableList : BasicList + { + /* Like BasicList, but allows existing 
values to be changed + */ + public new object this[int index] + { + get { return head[index]; } + set { head[index] = value; } + } + public void RemoveLast() + { + head.RemoveLastWithMutate(); + } + + public void Clear() + { + head.Clear(); + } + } + + internal class BasicList : IEnumerable + { + /* Requirements: + * - Fast access by index + * - Immutable in the tail, so a node can be read (iterated) without locking + * - Lock-free tail handling must match the memory mode; struct for Node + * wouldn't work as "read" would not be atomic + * - Only operation required is append, but this shouldn't go out of its + * way to be inefficient + * - Assume that the caller is handling thread-safety (to co-ordinate with + * other code); no attempt to be thread-safe + * - Assume that the data is private; internal data structure is allowed to + * be mutable (i.e. array is fine as long as we don't screw it up) + */ + private static readonly Node nil = new Node(null, 0); + + public void CopyTo(Array array, int offset) + { + head.CopyTo(array, offset); + } + + protected Node head = nil; + + public int Add(object value) + { + return (head = head.Append(value)).Length - 1; + } + + public object this[int index] => head[index]; + + //public object TryGet(int index) + //{ + // return head.TryGet(index); + //} + + public void Trim() { head = head.Trim(); } + + public int Count => head.Length; + + IEnumerator IEnumerable.GetEnumerator() => new NodeEnumerator(head); + + public NodeEnumerator GetEnumerator() => new NodeEnumerator(head); + + public struct NodeEnumerator : IEnumerator + { + private int position; + private readonly Node node; + internal NodeEnumerator(Node node) + { + this.position = -1; + this.node = node; + } + void IEnumerator.Reset() { position = -1; } + public object Current { get { return node[position]; } } + public bool MoveNext() + { + int len = node.Length; + return (position <= len) && (++position < len); + } + } + + internal sealed class Node + { + public object this[int index] + { + get + { + if (index >= 0 && index < length) + { + return data[index]; + } + throw new ArgumentOutOfRangeException(nameof(index)); + } + set + { + if (index >= 0 && index < length) + { + data[index] = value; + } + else + { + throw new ArgumentOutOfRangeException(nameof(index)); + } + } + } + //public object TryGet(int index) + //{ + // return (index >= 0 && index < length) ? 
data[index] : null; + //} + private readonly object[] data; + + private int length; + public int Length => length; + + internal Node(object[] data, int length) + { + Helpers.DebugAssert((data == null && length == 0) || + (data != null && length > 0 && length <= data.Length)); + this.data = data; + + this.length = length; + } + + public void RemoveLastWithMutate() + { + if (length == 0) throw new InvalidOperationException(); + length -= 1; + } + + public Node Append(object value) + { + object[] newData; + int newLength = length + 1; + if (data == null) + { + newData = new object[10]; + } + else if (length == data.Length) + { + newData = new object[data.Length * 2]; + Array.Copy(data, newData, length); + } + else + { + newData = data; + } + newData[length] = value; + return new Node(newData, newLength); + } + + public Node Trim() + { + if (length == 0 || length == data.Length) return this; + object[] newData = new object[length]; + Array.Copy(data, newData, length); + return new Node(newData, length); + } + + internal int IndexOfString(string value) + { + for (int i = 0; i < length; i++) + { + if ((string)value == (string)data[i]) return i; + } + return -1; + } + + internal int IndexOfReference(object instance) + { + for (int i = 0; i < length; i++) + { + if ((object)instance == (object)data[i]) return i; + } // ^^^ (object) above should be preserved, even if this was typed; needs + // to be a reference check + return -1; + } + + internal int IndexOf(MatchPredicate predicate, object ctx) + { + for (int i = 0; i < length; i++) + { + if (predicate(data[i], ctx)) return i; + } + return -1; + } + + internal void CopyTo(Array array, int offset) + { + if (length > 0) + { + Array.Copy(data, 0, array, offset, length); + } + } + + internal void Clear() + { + if (data != null) + { + Array.Clear(data, 0, data.Length); + } + length = 0; + } + } + + internal int IndexOf(MatchPredicate predicate, object ctx) + { + return head.IndexOf(predicate, ctx); + } + + internal int IndexOfString(string value) + { + return head.IndexOfString(value); + } + + internal int IndexOfReference(object instance) + { + return head.IndexOfReference(instance); + } + + internal delegate bool MatchPredicate(object value, object ctx); + + internal bool Contains(object value) + { + foreach (object obj in this) + { + if (object.Equals(obj, value)) return true; + } + return false; + } + + internal sealed class Group + { + public readonly int First; + public readonly BasicList Items; + public Group(int first) + { + this.First = first; + this.Items = new BasicList(); + } + } + + internal static BasicList GetContiguousGroups(int[] keys, object[] values) + { + if (keys == null) throw new ArgumentNullException(nameof(keys)); + if (values == null) throw new ArgumentNullException(nameof(values)); + if (values.Length < keys.Length) throw new ArgumentException("Not all keys are covered by values", nameof(values)); + BasicList outer = new BasicList(); + Group group = null; + for (int i = 0; i < keys.Length; i++) + { + if (i == 0 || keys[i] != keys[i - 1]) { group = null; } + if (group == null) + { + group = new Group(keys[i]); + outer.Add(group); + } + group.Items.Add(values[i]); + } + return outer; + } + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/Meta/BasicList.cs.meta b/Runtime/Protobuf-net/Meta/BasicList.cs.meta new file mode 100644 index 0000000..3304e30 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/BasicList.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: be5fc2a1ac0731a44b0365987d942485 +MonoImporter: + 
externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/CallbackSet.cs b/Runtime/Protobuf-net/Meta/CallbackSet.cs new file mode 100644 index 0000000..8b08585 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/CallbackSet.cs @@ -0,0 +1,110 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; + +namespace ProtoBuf.Meta +{ + /// + /// Represents the set of serialization callbacks to be used when serializing/deserializing a type. + /// + public class CallbackSet + { + private readonly MetaType metaType; + internal CallbackSet(MetaType metaType) + { + this.metaType = metaType ?? throw new ArgumentNullException(nameof(metaType)); + } + + internal MethodInfo this[TypeModel.CallbackType callbackType] + { + get + { + switch (callbackType) + { + case TypeModel.CallbackType.BeforeSerialize: return beforeSerialize; + case TypeModel.CallbackType.AfterSerialize: return afterSerialize; + case TypeModel.CallbackType.BeforeDeserialize: return beforeDeserialize; + case TypeModel.CallbackType.AfterDeserialize: return afterDeserialize; + default: throw new ArgumentException("Callback type not supported: " + callbackType.ToString(), "callbackType"); + } + } + } + + internal static bool CheckCallbackParameters(TypeModel model, MethodInfo method) + { + ParameterInfo[] args = method.GetParameters(); + for (int i = 0; i < args.Length; i++) + { + Type paramType = args[i].ParameterType; + if (paramType == model.MapType(typeof(SerializationContext))) { } + else if (paramType == model.MapType(typeof(System.Type))) { } +#if PLAT_BINARYFORMATTER + else if (paramType == model.MapType(typeof(System.Runtime.Serialization.StreamingContext))) { } +#endif + else return false; + } + return true; + } + + private MethodInfo SanityCheckCallback(TypeModel model, MethodInfo callback) + { + metaType.ThrowIfFrozen(); + if (callback == null) return callback; // fine + if (callback.IsStatic) throw new ArgumentException("Callbacks cannot be static", nameof(callback)); + if (callback.ReturnType != model.MapType(typeof(void)) + || !CheckCallbackParameters(model, callback)) + { + throw CreateInvalidCallbackSignature(callback); + } + return callback; + } + + internal static Exception CreateInvalidCallbackSignature(MethodInfo method) + { + return new NotSupportedException("Invalid callback signature in " + method.DeclaringType.FullName + "." 
+ method.Name); + } + + private MethodInfo beforeSerialize, afterSerialize, beforeDeserialize, afterDeserialize; + + /// Called before serializing an instance + public MethodInfo BeforeSerialize + { + get { return beforeSerialize; } + set { beforeSerialize = SanityCheckCallback(metaType.Model, value); } + } + + /// Called before deserializing an instance + public MethodInfo BeforeDeserialize + { + get { return beforeDeserialize; } + set { beforeDeserialize = SanityCheckCallback(metaType.Model, value); } + } + + /// Called after serializing an instance + public MethodInfo AfterSerialize + { + get { return afterSerialize; } + set { afterSerialize = SanityCheckCallback(metaType.Model, value); } + } + + /// Called after deserializing an instance + public MethodInfo AfterDeserialize + { + get { return afterDeserialize; } + set { afterDeserialize = SanityCheckCallback(metaType.Model, value); } + } + + /// + /// True if any callback is set, else False + /// + public bool NonTrivial + { + get + { + return beforeSerialize != null || beforeDeserialize != null + || afterSerialize != null || afterDeserialize != null; + } + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Meta/CallbackSet.cs.meta b/Runtime/Protobuf-net/Meta/CallbackSet.cs.meta new file mode 100644 index 0000000..0c6da40 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/CallbackSet.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: de0e7cb7bfcf4904aa31e910f241a8aa +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/MetaType.cs b/Runtime/Protobuf-net/Meta/MetaType.cs new file mode 100644 index 0000000..8d9bed6 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/MetaType.cs @@ -0,0 +1,2171 @@ +#if !NO_RUNTIME +using System; +using System.Collections; +using System.Text; +using ProtoBuf.Serializers; +using System.Reflection; +using System.Collections.Generic; + +#if PROFILE259 +using System.Linq; +#endif + +namespace ProtoBuf.Meta +{ + /// + /// Represents a type at runtime for use with protobuf, allowing the field mappings (etc) to be defined + /// + public class MetaType : ISerializerProxy + { + internal sealed class Comparer : IComparer, IComparer + { + public static readonly Comparer Default = new Comparer(); + public int Compare(object x, object y) + { + return Compare(x as MetaType, y as MetaType); + } + public int Compare(MetaType x, MetaType y) + { + if (ReferenceEquals(x, y)) return 0; + if (x == null) return -1; + if (y == null) return 1; + + return string.Compare(x.GetSchemaTypeName(), y.GetSchemaTypeName(), StringComparison.Ordinal); + } + } + /// + /// Get the name of the type being represented + /// + public override string ToString() + { + return type.ToString(); + } + + IProtoSerializer ISerializerProxy.Serializer => Serializer; + private MetaType baseType; + + /// + /// Gets the base-type for this type + /// + public MetaType BaseType => baseType; + + internal TypeModel Model => model; + + /// + /// When used to compile a model, should public serialization/deserialzation methods + /// be included for this type? + /// + public bool IncludeSerializerMethod + { // negated to minimize common-case / initializer + get { return !HasFlag(OPTIONS_PrivateOnApi); } + set { SetFlag(OPTIONS_PrivateOnApi, !value, true); } + } + + /// + /// Should this type be treated as a reference by default? 
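+ /// When true, members declared with this contract type are treated as references by default,
+ /// so repeated references to one instance round-trip as a single object unless a member
+ /// overrides the setting.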
+ /// + public bool AsReferenceDefault + { + get { return HasFlag(OPTIONS_AsReferenceDefault); } + set { SetFlag(OPTIONS_AsReferenceDefault, value, true); } + } + + private BasicList subTypes; + private bool IsValidSubType(Type subType) + { +#if COREFX || PROFILE259 + return typeInfo.IsAssignableFrom(subType.GetTypeInfo()); +#else + return type.IsAssignableFrom(subType); +#endif + } + /// + /// Adds a known sub-type to the inheritance model + /// + public MetaType AddSubType(int fieldNumber, Type derivedType) + { + return AddSubType(fieldNumber, derivedType, DataFormat.Default); + } + /// + /// Adds a known sub-type to the inheritance model + /// + public MetaType AddSubType(int fieldNumber, Type derivedType, DataFormat dataFormat) + { + if (derivedType == null) throw new ArgumentNullException("derivedType"); + if (fieldNumber < 1) throw new ArgumentOutOfRangeException("fieldNumber"); +#if COREFX || COREFX || PROFILE259 + if (!(typeInfo.IsClass || typeInfo.IsInterface) || typeInfo.IsSealed) { +#else + if (!(type.IsClass || type.IsInterface) || type.IsSealed) + { +#endif + throw new InvalidOperationException("Sub-types can only be added to non-sealed classes"); + } + if (!IsValidSubType(derivedType)) + { + throw new ArgumentException(derivedType.Name + " is not a valid sub-type of " + type.Name, "derivedType"); + } + MetaType derivedMeta = model[derivedType]; + ThrowIfFrozen(); + derivedMeta.ThrowIfFrozen(); + SubType subType = new SubType(fieldNumber, derivedMeta, dataFormat); + ThrowIfFrozen(); + + derivedMeta.SetBaseType(this); // includes ThrowIfFrozen + if (subTypes == null) subTypes = new BasicList(); + subTypes.Add(subType); + model.ResetKeyCache(); + return this; + } +#if COREFX || PROFILE259 + internal static readonly TypeInfo ienumerable = typeof(IEnumerable).GetTypeInfo(); +#else + internal static readonly Type ienumerable = typeof(IEnumerable); +#endif + private void SetBaseType(MetaType baseType) + { + if (baseType == null) throw new ArgumentNullException("baseType"); + if (this.baseType == baseType) return; + if (this.baseType != null) throw new InvalidOperationException($"Type '{this.baseType.Type.FullName}' can only participate in one inheritance hierarchy"); + + MetaType type = baseType; + while (type != null) + { + if (ReferenceEquals(type, this)) throw new InvalidOperationException($"Cyclic inheritance of '{this.baseType.Type.FullName}' is not allowed"); + type = type.baseType; + } + this.baseType = baseType; + } + + private CallbackSet callbacks; + + /// + /// Indicates whether the current type has defined callbacks + /// + public bool HasCallbacks => callbacks != null && callbacks.NonTrivial; + + /// + /// Indicates whether the current type has defined subtypes + /// + public bool HasSubtypes => subTypes != null && subTypes.Count != 0; + + /// + /// Returns the set of callbacks defined for this type + /// + public CallbackSet Callbacks + { + get + { + if (callbacks == null) callbacks = new CallbackSet(this); + return callbacks; + } + } + + private bool IsValueType + { + get + { +#if COREFX || PROFILE259 + return typeInfo.IsValueType; +#else + return type.IsValueType; +#endif + } + } + /// + /// Assigns the callbacks to use during serialiation/deserialization. + /// + /// The method (or null) called before serialization begins. + /// The method (or null) called when serialization is complete. + /// The method (or null) called before deserialization begins (or when a new instance is created during deserialization). 
+ /// The method (or null) called when deserialization is complete. + /// The set of callbacks. + public MetaType SetCallbacks(MethodInfo beforeSerialize, MethodInfo afterSerialize, MethodInfo beforeDeserialize, MethodInfo afterDeserialize) + { + CallbackSet callbacks = Callbacks; + callbacks.BeforeSerialize = beforeSerialize; + callbacks.AfterSerialize = afterSerialize; + callbacks.BeforeDeserialize = beforeDeserialize; + callbacks.AfterDeserialize = afterDeserialize; + return this; + } + /// + /// Assigns the callbacks to use during serialiation/deserialization. + /// + /// The name of the method (or null) called before serialization begins. + /// The name of the method (or null) called when serialization is complete. + /// The name of the method (or null) called before deserialization begins (or when a new instance is created during deserialization). + /// The name of the method (or null) called when deserialization is complete. + /// The set of callbacks. + public MetaType SetCallbacks(string beforeSerialize, string afterSerialize, string beforeDeserialize, string afterDeserialize) + { + if (IsValueType) throw new InvalidOperationException(); + CallbackSet callbacks = Callbacks; + callbacks.BeforeSerialize = ResolveMethod(beforeSerialize, true); + callbacks.AfterSerialize = ResolveMethod(afterSerialize, true); + callbacks.BeforeDeserialize = ResolveMethod(beforeDeserialize, true); + callbacks.AfterDeserialize = ResolveMethod(afterDeserialize, true); + return this; + } + + /// + /// Returns the public Type name of this Type used in serialization + /// + public string GetSchemaTypeName() + { + if (surrogate != null) return model[surrogate].GetSchemaTypeName(); + + if (!string.IsNullOrEmpty(name)) return name; + + string typeName = type.Name; + if (type +#if COREFX || PROFILE259 + .GetTypeInfo() +#endif + .IsGenericType) + { + var sb = new StringBuilder(typeName); + int split = typeName.IndexOf('`'); + if (split >= 0) sb.Length = split; + foreach (Type arg in type +#if COREFX || PROFILE259 + .GetTypeInfo().GenericTypeArguments +#else + .GetGenericArguments() +#endif + ) + { + sb.Append('_'); + Type tmp = arg; + int key = model.GetKey(ref tmp); + MetaType mt; + if (key >= 0 && (mt = model[tmp]) != null && mt.surrogate == null) // <=== need to exclude surrogate to avoid chance of infinite loop + { + + sb.Append(mt.GetSchemaTypeName()); + } + else + { + sb.Append(tmp.Name); + } + } + return sb.ToString(); + } + + return typeName; + } + + private string name; + + /// + /// Gets or sets the name of this contract. + /// + public string Name + { + get + { + return name; + } + set + { + ThrowIfFrozen(); + name = value; + } + } + + private MethodInfo factory; + /// + /// Designate a factory-method to use to create instances of this type + /// + public MetaType SetFactory(MethodInfo factory) + { + model.VerifyFactory(factory, type); + ThrowIfFrozen(); + this.factory = factory; + return this; + } + + /// + /// Designate a factory-method to use to create instances of this type + /// + public MetaType SetFactory(string factory) + { + return SetFactory(ResolveMethod(factory, false)); + } + + private MethodInfo ResolveMethod(string name, bool instance) + { + if (string.IsNullOrEmpty(name)) return null; +#if COREFX + return instance ? Helpers.GetInstanceMethod(typeInfo, name) : Helpers.GetStaticMethod(typeInfo, name); +#else + return instance ? 
Helpers.GetInstanceMethod(type, name) : Helpers.GetStaticMethod(type, name); +#endif + } + + private readonly RuntimeTypeModel model; + + internal static Exception InbuiltType(Type type) + { + return new ArgumentException("Data of this type has inbuilt behaviour, and cannot be added to a model in this way: " + type.FullName); + } + + internal MetaType(RuntimeTypeModel model, Type type, MethodInfo factory) + { + this.factory = factory; + if (model == null) throw new ArgumentNullException("model"); + if (type == null) throw new ArgumentNullException("type"); + + if (type.IsArray) throw InbuiltType(type); + IProtoSerializer coreSerializer = model.TryGetBasicTypeSerializer(type); + if (coreSerializer != null) + { + throw InbuiltType(type); + } + + this.type = type; +#if COREFX || PROFILE259 + this.typeInfo = type.GetTypeInfo(); +#endif + this.model = model; + + if (Helpers.IsEnum(type)) + { +#if COREFX || PROFILE259 + EnumPassthru = typeInfo.IsDefined(typeof(FlagsAttribute), false); +#else + EnumPassthru = type.IsDefined(model.MapType(typeof(FlagsAttribute)), false); +#endif + } + } +#if COREFX || PROFILE259 + private readonly TypeInfo typeInfo; +#endif + /// + /// Throws an exception if the type has been made immutable + /// + protected internal void ThrowIfFrozen() + { + if ((flags & OPTIONS_Frozen) != 0) throw new InvalidOperationException("The type cannot be changed once a serializer has been generated for " + type.FullName); + } + + // internal void Freeze() { flags |= OPTIONS_Frozen; } + + private readonly Type type; + /// + /// The runtime type that the meta-type represents + /// + public Type Type => type; + + private IProtoTypeSerializer serializer; + internal IProtoTypeSerializer Serializer + { + get + { + if (serializer == null) + { + int opaqueToken = 0; + try + { + model.TakeLock(ref opaqueToken); + if (serializer == null) + { // double-check, but our main purpse with this lock is to ensure thread-safety with + // serializers needing to wait until another thread has finished adding the properties + SetFlag(OPTIONS_Frozen, true, false); + serializer = BuildSerializer(); +#if FEAT_COMPILER + if (model.AutoCompile) CompileInPlace(); +#endif + } + } + finally + { + model.ReleaseLock(opaqueToken); + } + } + return serializer; + } + } + internal bool IsList + { + get + { + Type itemType = IgnoreListHandling ? null : TypeModel.GetListItemType(model, type); + return itemType != null; + } + } + private IProtoTypeSerializer BuildSerializer() + { + if (Helpers.IsEnum(type)) + { + return new TagDecorator(ProtoBuf.Serializer.ListItemTag, WireType.Variant, false, new EnumSerializer(type, GetEnumMap())); + } + Type itemType = IgnoreListHandling ? 
null : TypeModel.GetListItemType(model, type); + if (itemType != null) + { + if (surrogate != null) + { + throw new ArgumentException("Repeated data (a list, collection, etc) has inbuilt behaviour and cannot use a surrogate"); + } + if (subTypes != null && subTypes.Count != 0) + { + throw new ArgumentException("Repeated data (a list, collection, etc) has inbuilt behaviour and cannot be subclassed"); + } + Type defaultType = null; + ResolveListTypes(model, type, ref itemType, ref defaultType); + ValueMember fakeMember = new ValueMember(model, ProtoBuf.Serializer.ListItemTag, type, itemType, defaultType, DataFormat.Default); + return new TypeSerializer(model, type, new int[] { ProtoBuf.Serializer.ListItemTag }, new IProtoSerializer[] { fakeMember.Serializer }, null, true, true, null, constructType, factory); + } + if (surrogate != null) + { + MetaType mt = model[surrogate], mtBase; + while ((mtBase = mt.baseType) != null) { mt = mtBase; } + return new SurrogateSerializer(model, type, surrogate, mt.Serializer); + } + if (IsAutoTuple) + { + ConstructorInfo ctor = ResolveTupleConstructor(type, out MemberInfo[] mapping); + if (ctor == null) throw new InvalidOperationException(); + return new TupleSerializer(model, ctor, mapping); + } + + fields.Trim(); + int fieldCount = fields.Count; + int subTypeCount = subTypes == null ? 0 : subTypes.Count; + int[] fieldNumbers = new int[fieldCount + subTypeCount]; + IProtoSerializer[] serializers = new IProtoSerializer[fieldCount + subTypeCount]; + int i = 0; + if (subTypeCount != 0) + { + foreach (SubType subType in subTypes) + { +#if COREFX || PROFILE259 + if (!subType.DerivedType.IgnoreListHandling && ienumerable.IsAssignableFrom(subType.DerivedType.Type.GetTypeInfo())) +#else + if (!subType.DerivedType.IgnoreListHandling && model.MapType(ienumerable).IsAssignableFrom(subType.DerivedType.Type)) +#endif + { + throw new ArgumentException("Repeated data (a list, collection, etc) has inbuilt behaviour and cannot be used as a subclass"); + } + fieldNumbers[i] = subType.FieldNumber; + serializers[i++] = subType.Serializer; + } + } + if (fieldCount != 0) + { + foreach (ValueMember member in fields) + { + fieldNumbers[i] = member.FieldNumber; + serializers[i++] = member.Serializer; + } + } + + BasicList baseCtorCallbacks = null; + MetaType tmp = BaseType; + + while (tmp != null) + { + MethodInfo method = tmp.HasCallbacks ? 
tmp.Callbacks.BeforeDeserialize : null; + if (method != null) + { + if (baseCtorCallbacks == null) baseCtorCallbacks = new BasicList(); + baseCtorCallbacks.Add(method); + } + tmp = tmp.BaseType; + } + MethodInfo[] arr = null; + if (baseCtorCallbacks != null) + { + arr = new MethodInfo[baseCtorCallbacks.Count]; + baseCtorCallbacks.CopyTo(arr, 0); + Array.Reverse(arr); + } + return new TypeSerializer(model, type, fieldNumbers, serializers, arr, baseType == null, UseConstructor, callbacks, constructType, factory); + } + + [Flags] + internal enum AttributeFamily + { + None = 0, ProtoBuf = 1, DataContractSerialier = 2, XmlSerializer = 4, AutoTuple = 8 + } + static Type GetBaseType(MetaType type) + { +#if COREFX || PROFILE259 + return type.typeInfo.BaseType; +#else + return type.type.BaseType; +#endif + } + internal static bool GetAsReferenceDefault(RuntimeTypeModel model, Type type) + { + if (type == null) throw new ArgumentNullException(nameof(type)); + if (Helpers.IsEnum(type)) return false; // never as-ref + AttributeMap[] typeAttribs = AttributeMap.Create(model, type, false); + for (int i = 0; i < typeAttribs.Length; i++) + { + if (typeAttribs[i].AttributeType.FullName == "ProtoBuf.ProtoContractAttribute") + { + if (typeAttribs[i].TryGet("AsReferenceDefault", out object tmp)) return (bool)tmp; + } + } + return false; + } + + internal void ApplyDefaultBehaviour() + { + TypeAddedEventArgs args = null; // allows us to share the event-args between events + RuntimeTypeModel.OnBeforeApplyDefaultBehaviour(this, ref args); + if (args == null || args.ApplyDefaultBehaviour) ApplyDefaultBehaviourImpl(); + RuntimeTypeModel.OnAfterApplyDefaultBehaviour(this, ref args); + } + + internal void ApplyDefaultBehaviourImpl() + { + Type baseType = GetBaseType(this); + if (baseType != null && model.FindWithoutAdd(baseType) == null + && GetContractFamily(model, baseType, null) != MetaType.AttributeFamily.None) + { + model.FindOrAddAuto(baseType, true, false, false); + } + + AttributeMap[] typeAttribs = AttributeMap.Create(model, type, false); + AttributeFamily family = GetContractFamily(model, type, typeAttribs); + if (family == AttributeFamily.AutoTuple) + { + SetFlag(OPTIONS_AutoTuple, true, true); + } + bool isEnum = !EnumPassthru && Helpers.IsEnum(type); + if (family == AttributeFamily.None && !isEnum) return; // and you'd like me to do what, exactly? 
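+ // The remainder of this method walks the type-level attributes and then the members:
+ // - ProtoInclude registers known sub-types; ProtoPartialIgnore/ProtoPartialMember collect
+ //   partial-class overrides that are applied later, per member.
+ // - ProtoContract supplies the contract name, enum pass-thru, DataMemberOffset,
+ //   InferTagFromName, ImplicitFields, SkipConstructor, IgnoreListHandling,
+ //   AsReferenceDefault, ImplicitFirstTag, IsGroup and Surrogate settings.
+ // - DataContract/XmlType only contribute a fallback for the contract name.
+ // Members are then normalized into ProtoMemberAttribute entries; when tags are implicit or
+ // inferred by name, the entries are sorted and re-based starting at implicitFirstTag before
+ // being added as ValueMembers, and any serialization callback methods are wired up at the end.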
+ + bool enumShouldUseImplicitPassThru = isEnum; + BasicList partialIgnores = null, partialMembers = null; + int dataMemberOffset = 0, implicitFirstTag = 1; + bool inferTagByName = model.InferTagFromNameDefault; + ImplicitFields implicitMode = ImplicitFields.None; + string name = null; + for (int i = 0; i < typeAttribs.Length; i++) + { + AttributeMap item = (AttributeMap)typeAttribs[i]; + object tmp; + string fullAttributeTypeName = item.AttributeType.FullName; + if (!isEnum && fullAttributeTypeName == "ProtoBuf.ProtoIncludeAttribute") + { + int tag = 0; + if (item.TryGet("tag", out tmp)) tag = (int)tmp; + DataFormat dataFormat = DataFormat.Default; + if (item.TryGet("DataFormat", out tmp)) + { + dataFormat = (DataFormat)(int)tmp; + } + Type knownType = null; + try + { + if (item.TryGet("knownTypeName", out tmp)) knownType = model.GetType((string)tmp, type +#if COREFX || PROFILE259 + .GetTypeInfo() +#endif + .Assembly); + else if (item.TryGet("knownType", out tmp)) knownType = (Type)tmp; + } + catch (Exception ex) + { + throw new InvalidOperationException("Unable to resolve sub-type of: " + type.FullName, ex); + } + if (knownType == null) + { + throw new InvalidOperationException("Unable to resolve sub-type of: " + type.FullName); + } + if (IsValidSubType(knownType)) AddSubType(tag, knownType, dataFormat); + } + + if (fullAttributeTypeName == "ProtoBuf.ProtoPartialIgnoreAttribute") + { + if (item.TryGet(nameof(ProtoPartialIgnoreAttribute.MemberName), out tmp) && tmp != null) + { + if (partialIgnores == null) partialIgnores = new BasicList(); + partialIgnores.Add((string)tmp); + } + } + if (!isEnum && fullAttributeTypeName == "ProtoBuf.ProtoPartialMemberAttribute") + { + if (partialMembers == null) partialMembers = new BasicList(); + partialMembers.Add(item); + } + + if (fullAttributeTypeName == "ProtoBuf.ProtoContractAttribute") + { + if (item.TryGet(nameof(ProtoContractAttribute.Name), out tmp)) name = (string)tmp; + if (Helpers.IsEnum(type)) // note this is subtly different to isEnum; want to do this even if [Flags] + { + if (item.TryGet(nameof(ProtoContractAttribute.EnumPassthruHasValue), false, out tmp) && (bool)tmp) + { + if (item.TryGet(nameof(ProtoContractAttribute.EnumPassthru), out tmp)) + { + EnumPassthru = (bool)tmp; + enumShouldUseImplicitPassThru = false; + if (EnumPassthru) isEnum = false; // no longer treated as an enum + } + } + } + else + { + if (item.TryGet(nameof(ProtoContractAttribute.DataMemberOffset), out tmp)) dataMemberOffset = (int)tmp; + + if (item.TryGet(nameof(ProtoContractAttribute.InferTagFromNameHasValue), false, out tmp) && (bool)tmp) + { + if (item.TryGet(nameof(ProtoContractAttribute.InferTagFromName), out tmp)) inferTagByName = (bool)tmp; + } + + if (item.TryGet(nameof(ProtoContractAttribute.ImplicitFields), out tmp) && tmp != null) + { + implicitMode = (ImplicitFields)(int)tmp; // note that this uses the bizarre unboxing rules of enums/underlying-types + } + + if (item.TryGet(nameof(ProtoContractAttribute.SkipConstructor), out tmp)) UseConstructor = !(bool)tmp; + if (item.TryGet(nameof(ProtoContractAttribute.IgnoreListHandling), out tmp)) IgnoreListHandling = (bool)tmp; + if (item.TryGet(nameof(ProtoContractAttribute.AsReferenceDefault), out tmp)) AsReferenceDefault = (bool)tmp; + if (item.TryGet(nameof(ProtoContractAttribute.ImplicitFirstTag), out tmp) && (int)tmp > 0) implicitFirstTag = (int)tmp; + if (item.TryGet(nameof(ProtoContractAttribute.IsGroup), out tmp)) IsGroup = (bool)tmp; + + if (item.TryGet(nameof(ProtoContractAttribute.Surrogate), out 
tmp)) + { + SetSurrogate((Type)tmp); + } + } + } + + if (fullAttributeTypeName == "System.Runtime.Serialization.DataContractAttribute") + { + if (name == null && item.TryGet("Name", out tmp)) name = (string)tmp; + } + if (fullAttributeTypeName == "System.Xml.Serialization.XmlTypeAttribute") + { + if (name == null && item.TryGet("TypeName", out tmp)) name = (string)tmp; + } + } + if (!string.IsNullOrEmpty(name)) Name = name; + if (implicitMode != ImplicitFields.None) + { + family &= AttributeFamily.ProtoBuf; // with implicit fields, **only** proto attributes are important + } + MethodInfo[] callbacks = null; + + BasicList members = new BasicList(); + +#if PROFILE259 + IEnumerable foundList; + if(isEnum) { + foundList = type.GetRuntimeFields(); + } + else + { + List list = new List(); + foreach(PropertyInfo prop in type.GetRuntimeProperties()) { + MethodInfo getter = Helpers.GetGetMethod(prop, false, false); + if(getter != null && !getter.IsStatic) list.Add(prop); + } + foreach(FieldInfo fld in type.GetRuntimeFields()) if(fld.IsPublic && !fld.IsStatic) list.Add(fld); + foreach(MethodInfo mthd in type.GetRuntimeMethods()) if(mthd.IsPublic && !mthd.IsStatic) list.Add(mthd); + foundList = list; + } +#else + MemberInfo[] foundList = type.GetMembers(isEnum ? BindingFlags.Public | BindingFlags.Static + : BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); +#endif + bool hasConflictingEnumValue = false; + foreach (MemberInfo member in foundList) + { + if (member.DeclaringType != type) continue; + if (member.IsDefined(model.MapType(typeof(ProtoIgnoreAttribute)), true)) continue; + if (partialIgnores != null && partialIgnores.Contains(member.Name)) continue; + + bool forced = false, isPublic, isField; + Type effectiveType; + + if (member is PropertyInfo property) + { + if (isEnum) continue; // wasn't expecting any props! + MemberInfo backingField = null; + if (!property.CanWrite) + { + // roslyn automatically implemented properties, in particular for get-only properties: <{Name}>k__BackingField; + var backingFieldName = $"<{property.Name}>k__BackingField"; + foreach (var fieldMemeber in foundList) + { + if ((fieldMemeber as FieldInfo != null) && fieldMemeber.Name == backingFieldName) + { + backingField = fieldMemeber; + break; + } + } + } + effectiveType = property.PropertyType; + isPublic = Helpers.GetGetMethod(property, false, false) != null; + isField = false; + ApplyDefaultBehaviour_AddMembers(model, family, isEnum, partialMembers, dataMemberOffset, inferTagByName, implicitMode, members, member, ref forced, isPublic, isField, ref effectiveType, ref hasConflictingEnumValue, backingField); + } + else if (member is FieldInfo field) + { + effectiveType = field.FieldType; + isPublic = field.IsPublic; + isField = true; + if (isEnum && !field.IsStatic) + { // only care about static things on enums; WinRT has a __value instance field! 
+ continue; + } + ApplyDefaultBehaviour_AddMembers(model, family, isEnum, partialMembers, dataMemberOffset, inferTagByName, implicitMode, members, member, ref forced, isPublic, isField, ref effectiveType, ref hasConflictingEnumValue); + } + else if (member is MethodInfo method) + { + if (isEnum) continue; + AttributeMap[] memberAttribs = AttributeMap.Create(model, method, false); + if (memberAttribs != null && memberAttribs.Length > 0) + { + CheckForCallback(method, memberAttribs, "ProtoBuf.ProtoBeforeSerializationAttribute", ref callbacks, 0); + CheckForCallback(method, memberAttribs, "ProtoBuf.ProtoAfterSerializationAttribute", ref callbacks, 1); + CheckForCallback(method, memberAttribs, "ProtoBuf.ProtoBeforeDeserializationAttribute", ref callbacks, 2); + CheckForCallback(method, memberAttribs, "ProtoBuf.ProtoAfterDeserializationAttribute", ref callbacks, 3); + CheckForCallback(method, memberAttribs, "System.Runtime.Serialization.OnSerializingAttribute", ref callbacks, 4); + CheckForCallback(method, memberAttribs, "System.Runtime.Serialization.OnSerializedAttribute", ref callbacks, 5); + CheckForCallback(method, memberAttribs, "System.Runtime.Serialization.OnDeserializingAttribute", ref callbacks, 6); + CheckForCallback(method, memberAttribs, "System.Runtime.Serialization.OnDeserializedAttribute", ref callbacks, 7); + } + } + } + + if (isEnum && enumShouldUseImplicitPassThru && !hasConflictingEnumValue) + { + EnumPassthru = true; + // but leave isEnum alone + } + var arr = new ProtoMemberAttribute[members.Count]; + members.CopyTo(arr, 0); + + if (inferTagByName || implicitMode != ImplicitFields.None) + { + Array.Sort(arr); + int nextTag = implicitFirstTag; + foreach (ProtoMemberAttribute normalizedAttribute in arr) + { + if (!normalizedAttribute.TagIsPinned) // if ProtoMember etc sets a tag, we'll trust it + { + normalizedAttribute.Rebase(nextTag++); + } + } + } + + foreach (ProtoMemberAttribute normalizedAttribute in arr) + { + ValueMember vm = ApplyDefaultBehaviour(isEnum, normalizedAttribute); + if (vm != null) + { + Add(vm); + } + } + + if (callbacks != null) + { + SetCallbacks(Coalesce(callbacks, 0, 4), Coalesce(callbacks, 1, 5), + Coalesce(callbacks, 2, 6), Coalesce(callbacks, 3, 7)); + } + } + + private static void ApplyDefaultBehaviour_AddMembers(TypeModel model, AttributeFamily family, bool isEnum, BasicList partialMembers, int dataMemberOffset, bool inferTagByName, ImplicitFields implicitMode, BasicList members, MemberInfo member, ref bool forced, bool isPublic, bool isField, ref Type effectiveType, ref bool hasConflictingEnumValue, MemberInfo backingMember = null) + { + switch (implicitMode) + { + case ImplicitFields.AllFields: + if (isField) forced = true; + break; + case ImplicitFields.AllPublic: + if (isPublic) forced = true; + break; + } + + // we just don't like delegate types ;p +#if COREFX || PROFILE259 + if (effectiveType.GetTypeInfo().IsSubclassOf(typeof(Delegate))) effectiveType = null; +#else + if (effectiveType.IsSubclassOf(model.MapType(typeof(Delegate)))) effectiveType = null; +#endif + if (effectiveType != null) + { + ProtoMemberAttribute normalizedAttribute = NormalizeProtoMember(model, member, family, forced, isEnum, partialMembers, dataMemberOffset, inferTagByName, ref hasConflictingEnumValue, backingMember); + if (normalizedAttribute != null) members.Add(normalizedAttribute); + } + } + + static MethodInfo Coalesce(MethodInfo[] arr, int x, int y) + { + MethodInfo mi = arr[x]; + if (mi == null) mi = arr[y]; + return mi; + } + + internal static AttributeFamily 
GetContractFamily(RuntimeTypeModel model, Type type, AttributeMap[] attributes) + { + AttributeFamily family = AttributeFamily.None; + + if (attributes == null) attributes = AttributeMap.Create(model, type, false); + + for (int i = 0; i < attributes.Length; i++) + { + switch (attributes[i].AttributeType.FullName) + { + case "ProtoBuf.ProtoContractAttribute": + bool tmp = false; + GetFieldBoolean(ref tmp, attributes[i], "UseProtoMembersOnly"); + if (tmp) return AttributeFamily.ProtoBuf; + family |= AttributeFamily.ProtoBuf; + break; + case "System.Xml.Serialization.XmlTypeAttribute": + if (!model.AutoAddProtoContractTypesOnly) + { + family |= AttributeFamily.XmlSerializer; + } + break; + case "System.Runtime.Serialization.DataContractAttribute": + if (!model.AutoAddProtoContractTypesOnly) + { + family |= AttributeFamily.DataContractSerialier; + } + break; + } + } + if (family == AttributeFamily.None) + { // check for obvious tuples + if (ResolveTupleConstructor(type, out MemberInfo[] mapping) != null) + { + family |= AttributeFamily.AutoTuple; + } + } + return family; + } + internal static ConstructorInfo ResolveTupleConstructor(Type type, out MemberInfo[] mappedMembers) + { + mappedMembers = null; + if (type == null) throw new ArgumentNullException(nameof(type)); +#if COREFX || PROFILE259 + TypeInfo typeInfo = type.GetTypeInfo(); + if (typeInfo.IsAbstract) return null; // as if! + ConstructorInfo[] ctors = Helpers.GetConstructors(typeInfo, false); +#else + if (type.IsAbstract) return null; // as if! + ConstructorInfo[] ctors = Helpers.GetConstructors(type, false); +#endif + // need to have an interesting constructor to bother even checking this stuff + if (ctors.Length == 0 || (ctors.Length == 1 && ctors[0].GetParameters().Length == 0)) return null; + + MemberInfo[] fieldsPropsUnfiltered = Helpers.GetInstanceFieldsAndProperties(type, true); + BasicList memberList = new BasicList(); + // for most types we'll enforce that you need readonly, because that is what protobuf-net + // always did historically; but: if you smell so much like a Tuple that it is *in your name*, + // we'll let you past that + bool demandReadOnly = type.Name.IndexOf("Tuple", StringComparison.OrdinalIgnoreCase) < 0; + for (int i = 0; i < fieldsPropsUnfiltered.Length; i++) + { + if (fieldsPropsUnfiltered[i] is PropertyInfo prop) + { + if (!prop.CanRead) return null; // no use if can't read + if (demandReadOnly && prop.CanWrite && Helpers.GetSetMethod(prop, false, false) != null) return null; // don't allow a public set (need to allow non-public to handle Mono's KeyValuePair<,>) + memberList.Add(prop); + } + else + { + if (fieldsPropsUnfiltered[i] is FieldInfo field) + { + if (demandReadOnly && !field.IsInitOnly) return null; // all public fields must be readonly to be counted a tuple + memberList.Add(field); + } + } + } + if (memberList.Count == 0) + { + return null; + } + + MemberInfo[] members = new MemberInfo[memberList.Count]; + memberList.CopyTo(members, 0); + + int[] mapping = new int[members.Length]; + int found = 0; + ConstructorInfo result = null; + mappedMembers = new MemberInfo[mapping.Length]; + for (int i = 0; i < ctors.Length; i++) + { + ParameterInfo[] parameters = ctors[i].GetParameters(); + + if (parameters.Length != members.Length) continue; + + // reset the mappings to test + for (int j = 0; j < mapping.Length; j++) mapping[j] = -1; + + for (int j = 0; j < parameters.Length; j++) + { + for (int k = 0; k < members.Length; k++) + { + if (string.Compare(parameters[j].Name, members[k].Name, 
StringComparison.OrdinalIgnoreCase) != 0) continue; + Type memberType = Helpers.GetMemberType(members[k]); + if (memberType != parameters[j].ParameterType) continue; + + mapping[j] = k; + } + } + // did we map all? + bool notMapped = false; + for (int j = 0; j < mapping.Length; j++) + { + if (mapping[j] < 0) + { + notMapped = true; + break; + } + mappedMembers[j] = members[mapping[j]]; + } + + if (notMapped) continue; + found++; + result = ctors[i]; + + } + return found == 1 ? result : null; + } + + private static void CheckForCallback(MethodInfo method, AttributeMap[] attributes, string callbackTypeName, ref MethodInfo[] callbacks, int index) + { + for (int i = 0; i < attributes.Length; i++) + { + if (attributes[i].AttributeType.FullName == callbackTypeName) + { + if (callbacks == null) { callbacks = new MethodInfo[8]; } + else if (callbacks[index] != null) + { +#if COREFX || PROFILE259 + Type reflected = method.DeclaringType; +#else + Type reflected = method.ReflectedType; +#endif + throw new ProtoException("Duplicate " + callbackTypeName + " callbacks on " + reflected.FullName); + } + callbacks[index] = method; + } + } + } + private static bool HasFamily(AttributeFamily value, AttributeFamily required) + { + return (value & required) == required; + } + + private static ProtoMemberAttribute NormalizeProtoMember(TypeModel model, MemberInfo member, AttributeFamily family, bool forced, bool isEnum, BasicList partialMembers, int dataMemberOffset, bool inferByTagName, ref bool hasConflictingEnumValue, MemberInfo backingMember = null) + { + if (member == null || (family == AttributeFamily.None && !isEnum)) return null; // nix + int fieldNumber = int.MinValue, minAcceptFieldNumber = inferByTagName ? -1 : 1; + string name = null; + bool isPacked = false, ignore = false, done = false, isRequired = false, asReference = false, asReferenceHasValue = false, dynamicType = false, tagIsPinned = false, overwriteList = false; + DataFormat dataFormat = DataFormat.Default; + if (isEnum) forced = true; + AttributeMap[] attribs = AttributeMap.Create(model, member, true); + AttributeMap attrib; + + if (isEnum) + { + attrib = GetAttribute(attribs, "ProtoBuf.ProtoIgnoreAttribute"); + if (attrib != null) + { + ignore = true; + } + else + { + attrib = GetAttribute(attribs, "ProtoBuf.ProtoEnumAttribute"); +#if PORTABLE || CF || COREFX || PROFILE259 + fieldNumber = Convert.ToInt32(((FieldInfo)member).GetValue(null)); +#else + fieldNumber = Convert.ToInt32(((FieldInfo)member).GetRawConstantValue()); +#endif + if (attrib != null) + { + GetFieldName(ref name, attrib, nameof(ProtoEnumAttribute.Name)); + + if ((bool)Helpers.GetInstanceMethod(attrib.AttributeType +#if COREFX || PROFILE259 + .GetTypeInfo() +#endif + , nameof(ProtoEnumAttribute.HasValue)).Invoke(attrib.Target, null)) + { + if (attrib.TryGet(nameof(ProtoEnumAttribute.Value), out object tmp)) + { + if (fieldNumber != (int)tmp) + { + hasConflictingEnumValue = true; + } + fieldNumber = (int)tmp; + } + } + } + + } + done = true; + } + + if (!ignore && !done) // always consider ProtoMember + { + attrib = GetAttribute(attribs, "ProtoBuf.ProtoMemberAttribute"); + GetIgnore(ref ignore, attrib, attribs, "ProtoBuf.ProtoIgnoreAttribute"); + + if (!ignore && attrib != null) + { + GetFieldNumber(ref fieldNumber, attrib, "Tag"); + GetFieldName(ref name, attrib, "Name"); + GetFieldBoolean(ref isRequired, attrib, "IsRequired"); + GetFieldBoolean(ref isPacked, attrib, "IsPacked"); + GetFieldBoolean(ref overwriteList, attrib, "OverwriteList"); + GetDataFormat(ref dataFormat, 
attrib, "DataFormat"); + GetFieldBoolean(ref asReferenceHasValue, attrib, "AsReferenceHasValue", false); + + if (asReferenceHasValue) + { + asReferenceHasValue = GetFieldBoolean(ref asReference, attrib, "AsReference", true); + } + GetFieldBoolean(ref dynamicType, attrib, "DynamicType"); + done = tagIsPinned = fieldNumber > 0; // note minAcceptFieldNumber only applies to non-proto + } + + if (!done && partialMembers != null) + { + foreach (AttributeMap ppma in partialMembers) + { + if (ppma.TryGet("MemberName", out object tmp) && (string)tmp == member.Name) + { + GetFieldNumber(ref fieldNumber, ppma, "Tag"); + GetFieldName(ref name, ppma, "Name"); + GetFieldBoolean(ref isRequired, ppma, "IsRequired"); + GetFieldBoolean(ref isPacked, ppma, "IsPacked"); + GetFieldBoolean(ref overwriteList, attrib, "OverwriteList"); + GetDataFormat(ref dataFormat, ppma, "DataFormat"); + GetFieldBoolean(ref asReferenceHasValue, attrib, "AsReferenceHasValue", false); + + if (asReferenceHasValue) + { + asReferenceHasValue = GetFieldBoolean(ref asReference, ppma, "AsReference", true); + } + GetFieldBoolean(ref dynamicType, ppma, "DynamicType"); + if (done = tagIsPinned = fieldNumber > 0) break; // note minAcceptFieldNumber only applies to non-proto + } + } + } + } + + if (!ignore && !done && HasFamily(family, AttributeFamily.DataContractSerialier)) + { + attrib = GetAttribute(attribs, "System.Runtime.Serialization.DataMemberAttribute"); + if (attrib != null) + { + GetFieldNumber(ref fieldNumber, attrib, "Order"); + GetFieldName(ref name, attrib, "Name"); + GetFieldBoolean(ref isRequired, attrib, "IsRequired"); + done = fieldNumber >= minAcceptFieldNumber; + if (done) fieldNumber += dataMemberOffset; // dataMemberOffset only applies to DCS flags, to allow us to "bump" WCF by a notch + } + } + if (!ignore && !done && HasFamily(family, AttributeFamily.XmlSerializer)) + { + attrib = GetAttribute(attribs, "System.Xml.Serialization.XmlElementAttribute"); + if (attrib == null) attrib = GetAttribute(attribs, "System.Xml.Serialization.XmlArrayAttribute"); + GetIgnore(ref ignore, attrib, attribs, "System.Xml.Serialization.XmlIgnoreAttribute"); + if (attrib != null && !ignore) + { + GetFieldNumber(ref fieldNumber, attrib, "Order"); + GetFieldName(ref name, attrib, "ElementName"); + done = fieldNumber >= minAcceptFieldNumber; + } + } + if (!ignore && !done) + { + if (GetAttribute(attribs, "System.NonSerializedAttribute") != null) ignore = true; + } + if (ignore || (fieldNumber < minAcceptFieldNumber && !forced)) return null; + ProtoMemberAttribute result = new ProtoMemberAttribute(fieldNumber, forced || inferByTagName) + { + AsReference = asReference, + AsReferenceHasValue = asReferenceHasValue, + DataFormat = dataFormat, + DynamicType = dynamicType, + IsPacked = isPacked, + OverwriteList = overwriteList, + IsRequired = isRequired, + Name = string.IsNullOrEmpty(name) ? 
member.Name : name, + Member = member, + BackingMember = backingMember, + TagIsPinned = tagIsPinned + }; + return result; + } + + private ValueMember ApplyDefaultBehaviour(bool isEnum, ProtoMemberAttribute normalizedAttribute) + { + MemberInfo member; + if (normalizedAttribute == null || (member = normalizedAttribute.Member) == null) return null; // nix + + Type effectiveType = Helpers.GetMemberType(member); + + + Type itemType = null; + Type defaultType = null; + + // check for list types + ResolveListTypes(model, effectiveType, ref itemType, ref defaultType); + bool ignoreListHandling = false; + // but take it back if it is explicitly excluded + if (itemType != null) + { // looks like a list, but double check for IgnoreListHandling + int idx = model.FindOrAddAuto(effectiveType, false, true, false); + if (idx >= 0 && (ignoreListHandling = model[effectiveType].IgnoreListHandling)) + { + itemType = null; + defaultType = null; + } + } + AttributeMap[] attribs = AttributeMap.Create(model, member, true); + AttributeMap attrib; + + object defaultValue = null; + // implicit zero default + if (model.UseImplicitZeroDefaults) + { + switch (Helpers.GetTypeCode(effectiveType)) + { + case ProtoTypeCode.Boolean: defaultValue = false; break; + case ProtoTypeCode.Decimal: defaultValue = (decimal)0; break; + case ProtoTypeCode.Single: defaultValue = (float)0; break; + case ProtoTypeCode.Double: defaultValue = (double)0; break; + case ProtoTypeCode.Byte: defaultValue = (byte)0; break; + case ProtoTypeCode.Char: defaultValue = (char)0; break; + case ProtoTypeCode.Int16: defaultValue = (short)0; break; + case ProtoTypeCode.Int32: defaultValue = (int)0; break; + case ProtoTypeCode.Int64: defaultValue = (long)0; break; + case ProtoTypeCode.SByte: defaultValue = (sbyte)0; break; + case ProtoTypeCode.UInt16: defaultValue = (ushort)0; break; + case ProtoTypeCode.UInt32: defaultValue = (uint)0; break; + case ProtoTypeCode.UInt64: defaultValue = (ulong)0; break; + case ProtoTypeCode.TimeSpan: defaultValue = TimeSpan.Zero; break; + case ProtoTypeCode.Guid: defaultValue = Guid.Empty; break; + } + } + if ((attrib = GetAttribute(attribs, "System.ComponentModel.DefaultValueAttribute")) != null) + { + if (attrib.TryGet("Value", out object tmp)) defaultValue = tmp; + } + ValueMember vm = ((isEnum || normalizedAttribute.Tag > 0)) + ? 
new ValueMember(model, type, normalizedAttribute.Tag, member, effectiveType, itemType, defaultType, normalizedAttribute.DataFormat, defaultValue) + : null; + if (vm != null) + { + vm.BackingMember = normalizedAttribute.BackingMember; +#if COREFX || PROFILE259 + TypeInfo finalType = typeInfo; +#else + Type finalType = type; +#endif + PropertyInfo prop = Helpers.GetProperty(finalType, member.Name + "Specified", true); + MethodInfo getMethod = Helpers.GetGetMethod(prop, true, true); + if (getMethod == null || getMethod.IsStatic) prop = null; + if (prop != null) + { + vm.SetSpecified(getMethod, Helpers.GetSetMethod(prop, true, true)); + } + else + { + MethodInfo method = Helpers.GetInstanceMethod(finalType, "ShouldSerialize" + member.Name, Helpers.EmptyTypes); + if (method != null && method.ReturnType == model.MapType(typeof(bool))) + { + vm.SetSpecified(method, null); + } + } + if (!string.IsNullOrEmpty(normalizedAttribute.Name)) vm.SetName(normalizedAttribute.Name); + vm.IsPacked = normalizedAttribute.IsPacked; + vm.IsRequired = normalizedAttribute.IsRequired; + vm.OverwriteList = normalizedAttribute.OverwriteList; + if (normalizedAttribute.AsReferenceHasValue) + { + vm.AsReference = normalizedAttribute.AsReference; + } + vm.DynamicType = normalizedAttribute.DynamicType; + + vm.IsMap = ignoreListHandling ? false : vm.ResolveMapTypes(out var _, out var _, out var _); + if (vm.IsMap) // is it even *allowed* to be a map? + { + if ((attrib = GetAttribute(attribs, "ProtoBuf.ProtoMapAttribute")) != null) + { + if (attrib.TryGet(nameof(ProtoMapAttribute.DisableMap), out object tmp) && (bool)tmp) + { + vm.IsMap = false; + } + else + { + if (attrib.TryGet(nameof(ProtoMapAttribute.KeyFormat), out tmp)) vm.MapKeyFormat = (DataFormat)tmp; + if (attrib.TryGet(nameof(ProtoMapAttribute.ValueFormat), out tmp)) vm.MapValueFormat = (DataFormat)tmp; + } + } + } + + } + return vm; + } + + private static void GetDataFormat(ref DataFormat value, AttributeMap attrib, string memberName) + { + if ((attrib == null) || (value != DataFormat.Default)) return; + if (attrib.TryGet(memberName, out object obj) && obj != null) value = (DataFormat)obj; + } + + private static void GetIgnore(ref bool ignore, AttributeMap attrib, AttributeMap[] attribs, string fullName) + { + if (ignore || attrib == null) return; + ignore = GetAttribute(attribs, fullName) != null; + return; + } + + private static void GetFieldBoolean(ref bool value, AttributeMap attrib, string memberName) + { + GetFieldBoolean(ref value, attrib, memberName, true); + } + private static bool GetFieldBoolean(ref bool value, AttributeMap attrib, string memberName, bool publicOnly) + { + if (attrib == null) return false; + if (value) return true; + if (attrib.TryGet(memberName, publicOnly, out object obj) && obj != null) + { + value = (bool)obj; + return true; + } + return false; + } + + private static void GetFieldNumber(ref int value, AttributeMap attrib, string memberName) + { + if (attrib == null || value > 0) return; + if (attrib.TryGet(memberName, out object obj) && obj != null) value = (int)obj; + } + + private static void GetFieldName(ref string name, AttributeMap attrib, string memberName) + { + if (attrib == null || !string.IsNullOrEmpty(name)) return; + if (attrib.TryGet(memberName, out object obj) && obj != null) name = (string)obj; + } + + private static AttributeMap GetAttribute(AttributeMap[] attribs, string fullName) + { + for (int i = 0; i < attribs.Length; i++) + { + AttributeMap attrib = attribs[i]; + if (attrib != null && 
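For orientation, the member conventions that ApplyDefaultBehaviour discovers can be illustrated with a small sketch; the type and member names below are invented for the example and are not part of this change.

using System.Collections.Generic;
using System.ComponentModel;
using ProtoBuf;

[ProtoContract]
public class Invoice
{
    [ProtoMember(1)]
    public int Id { get; set; }

    // picked up via System.ComponentModel.DefaultValueAttribute
    [ProtoMember(2), DefaultValue(5)]
    public int Priority { get; set; } = 5;

    // the "<name>Specified" conditional-serialization pattern found above
    [ProtoMember(3)]
    public int Discount { get; set; }
    public bool DiscountSpecified { get; set; }

    // the "ShouldSerialize<name>" pattern is also recognised
    [ProtoMember(4)]
    public List<string> Notes { get; set; } = new List<string>();
    public bool ShouldSerializeNotes() { return Notes.Count > 0; }
}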
attrib.AttributeType.FullName == fullName) return attrib; + } + return null; + } + + /// + /// Adds a member (by name) to the MetaType + /// + public MetaType Add(int fieldNumber, string memberName) + { + AddField(fieldNumber, memberName, null, null, null); + return this; + } + + /// + /// Adds a member (by name) to the MetaType, returning the ValueMember rather than the fluent API. + /// This is otherwise identical to Add. + /// + public ValueMember AddField(int fieldNumber, string memberName) + { + return AddField(fieldNumber, memberName, null, null, null); + } + + /// + /// Gets or sets whether the type should use a parameterless constructor (the default), + /// or whether the type should skip the constructor completely. This option is not supported + /// on compact-framework. + /// + public bool UseConstructor + { // negated to have defaults as flat zero + get { return !HasFlag(OPTIONS_SkipConstructor); } + set { SetFlag(OPTIONS_SkipConstructor, !value, true); } + } + + /// + /// The concrete type to create when a new instance of this type is needed; this may be useful when dealing + /// with dynamic proxies, or with interface-based APIs + /// + public Type ConstructType + { + get { return constructType; } + set + { + ThrowIfFrozen(); + constructType = value; + } + } + + private Type constructType; + /// + /// Adds a member (by name) to the MetaType + /// + public MetaType Add(string memberName) + { + Add(GetNextFieldNumber(), memberName); + return this; + } + + Type surrogate; + /// + /// Performs serialization of this type via a surrogate; all + /// other serialization options are ignored and handled + /// by the surrogate's configuration. + /// + public void SetSurrogate(Type surrogateType) + { + if (surrogateType == type) surrogateType = null; + if (surrogateType != null) + { + // note that BuildSerializer checks the **CURRENT TYPE** is OK to be surrogated + if (surrogateType != null && Helpers.IsAssignableFrom(model.MapType(typeof(IEnumerable)), surrogateType)) + { + throw new ArgumentException("Repeated data (a list, collection, etc) has inbuilt behaviour and cannot be used as a surrogate"); + } + } + ThrowIfFrozen(); + this.surrogate = surrogateType; + // no point in offering chaining; no options are respected + } + + internal MetaType GetSurrogateOrSelf() + { + if (surrogate != null) return model[surrogate]; + return this; + } + + internal MetaType GetSurrogateOrBaseOrSelf(bool deep) + { + if (surrogate != null) return model[surrogate]; + MetaType snapshot = this.baseType; + if (snapshot != null) + { + if (deep) + { + MetaType tmp; + do + { + tmp = snapshot; + snapshot = snapshot.baseType; + } while (snapshot != null); + return tmp; + } + return snapshot; + } + return this; + } + + private int GetNextFieldNumber() + { + int maxField = 0; + foreach (ValueMember member in fields) + { + if (member.FieldNumber > maxField) maxField = member.FieldNumber; + } + if (subTypes != null) + { + foreach (SubType subType in subTypes) + { + if (subType.FieldNumber > maxField) maxField = subType.FieldNumber; + } + } + return maxField + 1; + } + + /// + /// Adds a set of members (by name) to the MetaType + /// + public MetaType Add(params string[] memberNames) + { + if (memberNames == null) throw new ArgumentNullException("memberNames"); + int next = GetNextFieldNumber(); + for (int i = 0; i < memberNames.Length; i++) + { + Add(next++, memberNames[i]); + } + return this; + } + + /// + /// Adds a member (by name) to the MetaType + /// + public MetaType Add(int fieldNumber, string memberName, 
object defaultValue) + { + AddField(fieldNumber, memberName, null, null, defaultValue); + return this; + } + + /// + /// Adds a member (by name) to the MetaType, including an itemType and defaultType for representing lists + /// + public MetaType Add(int fieldNumber, string memberName, Type itemType, Type defaultType) + { + AddField(fieldNumber, memberName, itemType, defaultType, null); + return this; + } + + /// + /// Adds a member (by name) to the MetaType, including an itemType and defaultType for representing lists, returning the ValueMember rather than the fluent API. + /// This is otherwise identical to Add. + /// + public ValueMember AddField(int fieldNumber, string memberName, Type itemType, Type defaultType) + { + return AddField(fieldNumber, memberName, itemType, defaultType, null); + } + + private ValueMember AddField(int fieldNumber, string memberName, Type itemType, Type defaultType, object defaultValue) + { + MemberInfo mi = null; +#if PROFILE259 + mi = Helpers.IsEnum(type) ? type.GetTypeInfo().GetDeclaredField(memberName) : Helpers.GetInstanceMember(type.GetTypeInfo(), memberName); + +#else + MemberInfo[] members = type.GetMember(memberName, Helpers.IsEnum(type) ? BindingFlags.Static | BindingFlags.Public : BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (members != null && members.Length == 1) mi = members[0]; +#endif + if (mi == null) throw new ArgumentException("Unable to determine member: " + memberName, "memberName"); + + Type miType; + PropertyInfo pi = null; + FieldInfo fi = null; +#if PORTABLE || COREFX || PROFILE259 + pi = mi as PropertyInfo; + if (pi == null) + { + fi = mi as FieldInfo; + if (fi == null) + { + throw new NotSupportedException(mi.GetType().Name); + } + else + { + miType = fi.FieldType; + } + } + else + { + miType = pi.PropertyType; + } +#else + switch (mi.MemberType) + { + case MemberTypes.Field: + fi = (FieldInfo)mi; + miType = fi.FieldType; break; + case MemberTypes.Property: + pi = (PropertyInfo)mi; + miType = pi.PropertyType; break; + default: + throw new NotSupportedException(mi.MemberType.ToString()); + } +#endif + ResolveListTypes(model, miType, ref itemType, ref defaultType); + + MemberInfo backingField = null; + if (pi?.CanWrite == false) + { + string name = $"<{((PropertyInfo)mi).Name}>k__BackingField"; +#if PROFILE259 + var backingMembers = type.GetTypeInfo().DeclaredMembers; + var memberInfos = backingMembers as MemberInfo[] ?? backingMembers.ToArray(); + if (memberInfos.Count() == 1) + { + MemberInfo first = memberInfos.FirstOrDefault(); + if (first is FieldInfo) + { + backingField = first; + } + } +#else + var backingMembers = type.GetMember($"<{((PropertyInfo)mi).Name}>k__BackingField", Helpers.IsEnum(type) ? BindingFlags.Static | BindingFlags.Public : BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic); + if (backingMembers != null && backingMembers.Length == 1 && (backingMembers[0] as FieldInfo) != null) + backingField = backingMembers[0]; +#endif + } + ValueMember newField = new ValueMember(model, type, fieldNumber, backingField ?? 
mi, miType, itemType, defaultType, DataFormat.Default, defaultValue); + if (backingField != null) + newField.SetName(mi.Name); + Add(newField); + return newField; + } + + internal static void ResolveListTypes(TypeModel model, Type type, ref Type itemType, ref Type defaultType) + { + if (type == null) return; + // handle arrays + if (type.IsArray) + { + if (type.GetArrayRank() != 1) + { + throw new NotSupportedException("Multi-dimensional arrays are not supported"); + } + itemType = type.GetElementType(); + if (itemType == model.MapType(typeof(byte))) + { + defaultType = itemType = null; + } + else + { + defaultType = type; + } + } + // handle lists + if (itemType == null) { itemType = TypeModel.GetListItemType(model, type); } + + // check for nested data (not allowed) + if (itemType != null) + { + Type nestedItemType = null, nestedDefaultType = null; + ResolveListTypes(model, itemType, ref nestedItemType, ref nestedDefaultType); + if (nestedItemType != null) + { + throw TypeModel.CreateNestedListsNotSupported(type); + } + } + + if (itemType != null && defaultType == null) + { +#if COREFX || PROFILE259 + TypeInfo typeInfo = type.GetTypeInfo(); + if (typeInfo.IsClass && !typeInfo.IsAbstract && Helpers.GetConstructor(typeInfo, Helpers.EmptyTypes, true) != null) +#else + if (type.IsClass && !type.IsAbstract && Helpers.GetConstructor(type, Helpers.EmptyTypes, true) != null) +#endif + { + defaultType = type; + } + if (defaultType == null) + { +#if COREFX || PROFILE259 + if (typeInfo.IsInterface) +#else + if (type.IsInterface) +#endif + { + + Type[] genArgs; +#if COREFX || PROFILE259 + if (typeInfo.IsGenericType && type.GetGenericTypeDefinition() == typeof(System.Collections.Generic.IDictionary<,>) + && itemType == typeof(System.Collections.Generic.KeyValuePair<,>).MakeGenericType(genArgs = typeInfo.GenericTypeArguments)) +#else + if (type.IsGenericType && type.GetGenericTypeDefinition() == model.MapType(typeof(System.Collections.Generic.IDictionary<,>)) + && itemType == model.MapType(typeof(System.Collections.Generic.KeyValuePair<,>)).MakeGenericType(genArgs = type.GetGenericArguments())) +#endif + { + defaultType = model.MapType(typeof(System.Collections.Generic.Dictionary<,>)).MakeGenericType(genArgs); + } + else + { + defaultType = model.MapType(typeof(System.Collections.Generic.List<>)).MakeGenericType(itemType); + } + } + } + // verify that the default type is appropriate + if (defaultType != null && !Helpers.IsAssignableFrom(type, defaultType)) { defaultType = null; } + } + } + + private void Add(ValueMember member) + { + int opaqueToken = 0; + try + { + model.TakeLock(ref opaqueToken); + ThrowIfFrozen(); + fields.Add(member); + } + finally + { + model.ReleaseLock(opaqueToken); + } + } + + /// + /// Returns the ValueMember that matchs a given field number, or null if not found + /// + public ValueMember this[int fieldNumber] + { + get + { + foreach (ValueMember member in fields) + { + if (member.FieldNumber == fieldNumber) return member; + } + return null; + } + } + /// + /// Returns the ValueMember that matchs a given member (property/field), or null if not found + /// + public ValueMember this[MemberInfo member] + { + get + { + if (member == null) return null; + foreach (ValueMember x in fields) + { + if (x.Member == member || x.BackingMember == member) return x; + } + return null; + } + } + private readonly BasicList fields = new BasicList(); + + /// + /// Returns the ValueMember instances associated with this type + /// + public ValueMember[] GetFields() + { + ValueMember[] arr = new 
ValueMember[fields.Count]; + fields.CopyTo(arr, 0); + Array.Sort(arr, ValueMember.Comparer.Default); + return arr; + } + + /// + /// Returns the SubType instances associated with this type + /// + public SubType[] GetSubtypes() + { + if (subTypes == null || subTypes.Count == 0) return new SubType[0]; + SubType[] arr = new SubType[subTypes.Count]; + subTypes.CopyTo(arr, 0); + Array.Sort(arr, SubType.Comparer.Default); + return arr; + } + + internal IEnumerable GetAllGenericArguments() + { + return GetAllGenericArguments(type); + } + + private static IEnumerable GetAllGenericArguments(Type type) + { + +#if PROFILE259 + var genericArguments = type.GetGenericTypeDefinition().GenericTypeArguments; +#else + var genericArguments = type.GetGenericArguments(); +#endif + foreach (var arg in genericArguments) + { + yield return arg; + foreach (var inner in GetAllGenericArguments(arg)) + { + yield return inner; + } + } + } + +#if FEAT_COMPILER + /// + /// Compiles the serializer for this type; this is *not* a full + /// standalone compile, but can significantly boost performance + /// while allowing additional types to be added. + /// + /// An in-place compile can access non-public types / members + public void CompileInPlace() + { + serializer = CompiledSerializer.Wrap(Serializer, model); + } +#endif + + internal bool IsDefined(int fieldNumber) + { + foreach (ValueMember field in fields) + { + if (field.FieldNumber == fieldNumber) return true; + } + return false; + } + + internal int GetKey(bool demand, bool getBaseKey) + { + return model.GetKey(type, demand, getBaseKey); + } + + internal EnumSerializer.EnumPair[] GetEnumMap() + { + if (HasFlag(OPTIONS_EnumPassThru)) return null; + EnumSerializer.EnumPair[] result = new EnumSerializer.EnumPair[fields.Count]; + for (int i = 0; i < result.Length; i++) + { + ValueMember member = (ValueMember)fields[i]; + int wireValue = member.FieldNumber; + object value = member.GetRawEnumValue(); + result[i] = new EnumSerializer.EnumPair(wireValue, value, member.MemberType); + } + return result; + } + + /// + /// Gets or sets a value indicating that an enum should be treated directly as an int/short/etc, rather + /// than enforcing .proto enum rules. This is useful *in particul* for [Flags] enums. 
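As a hedged illustration of the EnumPassthru option documented here, the enum and helper names below are invented for the example.

using System;
using ProtoBuf.Meta;

[Flags]
public enum FilePermissions { None = 0, Read = 1, Write = 2, Delete = 4 }

static class ModelSetup
{
    public static RuntimeTypeModel Build()
    {
        var model = RuntimeTypeModel.Create();
        // treat the [Flags] enum as a raw integer instead of enforcing .proto enum rules
        model.Add(typeof(FilePermissions), true).EnumPassthru = true;
        return model;
    }
}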
+ /// + public bool EnumPassthru + { + get { return HasFlag(OPTIONS_EnumPassThru); } + set { SetFlag(OPTIONS_EnumPassThru, value, true); } + } + + /// + /// Gets or sets a value indicating that this type should NOT be treated as a list, even if it has + /// familiar list-like characteristics (enumerable, add, etc) + /// + public bool IgnoreListHandling + { + get { return HasFlag(OPTIONS_IgnoreListHandling); } + set { SetFlag(OPTIONS_IgnoreListHandling, value, true); } + } + + internal bool Pending + { + get { return HasFlag(OPTIONS_Pending); } + set { SetFlag(OPTIONS_Pending, value, false); } + } + + private const ushort + OPTIONS_Pending = 1, + OPTIONS_EnumPassThru = 2, + OPTIONS_Frozen = 4, + OPTIONS_PrivateOnApi = 8, + OPTIONS_SkipConstructor = 16, + OPTIONS_AsReferenceDefault = 32, + OPTIONS_AutoTuple = 64, + OPTIONS_IgnoreListHandling = 128, + OPTIONS_IsGroup = 256; + + private volatile ushort flags; + private bool HasFlag(ushort flag) { return (flags & flag) == flag; } + private void SetFlag(ushort flag, bool value, bool throwIfFrozen) + { + if (throwIfFrozen && HasFlag(flag) != value) + { + ThrowIfFrozen(); + } + if (value) + flags |= flag; + else + flags = (ushort)(flags & ~flag); + } + + internal static MetaType GetRootType(MetaType source) + { + while (source.serializer != null) + { + MetaType tmp = source.baseType; + if (tmp == null) return source; + source = tmp; // else loop until we reach something that isn't generated, or is the root + } + + // now we get into uncertain territory + RuntimeTypeModel model = source.model; + int opaqueToken = 0; + try + { + model.TakeLock(ref opaqueToken); + + MetaType tmp; + while ((tmp = source.baseType) != null) source = tmp; + return source; + + } + finally + { + model.ReleaseLock(opaqueToken); + } + } + + internal bool IsPrepared() + { +#if FEAT_COMPILER + return serializer is CompiledSerializer; +#else + return false; +#endif + } + + internal IEnumerable Fields => this.fields; + + internal static StringBuilder NewLine(StringBuilder builder, int indent) + { + return Helpers.AppendLine(builder).Append(' ', indent * 3); + } + + internal bool IsAutoTuple => HasFlag(OPTIONS_AutoTuple); + + /// + /// Indicates whether this type should always be treated as a "group" (rather than a string-prefixed sub-message) + /// + public bool IsGroup + { + get { return HasFlag(OPTIONS_IsGroup); } + set { SetFlag(OPTIONS_IsGroup, value, true); } + } + + internal void WriteSchema(StringBuilder builder, int indent, ref RuntimeTypeModel.CommonImports imports, ProtoSyntax syntax) + { + if (surrogate != null) return; // nothing to write + + ValueMember[] fieldsArr = new ValueMember[fields.Count]; + fields.CopyTo(fieldsArr, 0); + Array.Sort(fieldsArr, ValueMember.Comparer.Default); + + if (IsList) + { + string itemTypeName = model.GetSchemaTypeName(TypeModel.GetListItemType(model, type), DataFormat.Default, false, false, ref imports); + NewLine(builder, indent).Append("message ").Append(GetSchemaTypeName()).Append(" {"); + NewLine(builder, indent + 1).Append("repeated ").Append(itemTypeName).Append(" items = 1;"); + NewLine(builder, indent).Append('}'); + } + else if (IsAutoTuple) + { // key-value-pair etc + + if (ResolveTupleConstructor(type, out MemberInfo[] mapping) != null) + { + NewLine(builder, indent).Append("message ").Append(GetSchemaTypeName()).Append(" {"); + for (int i = 0; i < mapping.Length; i++) + { + Type effectiveType; + if (mapping[i] is PropertyInfo property) + { + effectiveType = property.PropertyType; + } + else if (mapping[i] is FieldInfo 
field) + { + effectiveType = field.FieldType; + } + else + { + throw new NotSupportedException("Unknown member type: " + mapping[i].GetType().Name); + } + NewLine(builder, indent + 1).Append(syntax == ProtoSyntax.Proto2 ? "optional " : "").Append(model.GetSchemaTypeName(effectiveType, DataFormat.Default, false, false, ref imports).Replace('.', '_')) + .Append(' ').Append(mapping[i].Name).Append(" = ").Append(i + 1).Append(';'); + } + NewLine(builder, indent).Append('}'); + } + } + else if (Helpers.IsEnum(type)) + { + NewLine(builder, indent).Append("enum ").Append(GetSchemaTypeName()).Append(" {"); + if (fieldsArr.Length == 0 && EnumPassthru) + { + if (type +#if COREFX || PROFILE259 + .GetTypeInfo() +#endif +.IsDefined(model.MapType(typeof(FlagsAttribute)), false)) + { + NewLine(builder, indent + 1).Append("// this is a composite/flags enumeration"); + } + else + { + NewLine(builder, indent + 1).Append("// this enumeration will be passed as a raw value"); + } + foreach (FieldInfo field in +#if PROFILE259 + type.GetRuntimeFields() +#else + type.GetFields() +#endif + + ) + { + if (field.IsStatic && field.IsLiteral) + { + object enumVal; +#if PORTABLE || CF || NETSTANDARD1_3 || NETSTANDARD1_4 || PROFILE259 || UAP + enumVal = Convert.ChangeType(field.GetValue(null), Enum.GetUnderlyingType(field.FieldType), System.Globalization.CultureInfo.InvariantCulture); +#else + enumVal = field.GetRawConstantValue(); +#endif + NewLine(builder, indent + 1).Append(field.Name).Append(" = ").Append(enumVal).Append(";"); + } + } + + } + else + { + Dictionary countByField = new Dictionary(fieldsArr.Length); + bool needsAlias = false; + foreach (var field in fieldsArr) + { + if (countByField.ContainsKey(field.FieldNumber)) + { // no point actually counting; that's enough to know we have a problem + needsAlias = true; + break; + } + countByField.Add(field.FieldNumber, 1); + } + if (needsAlias) + { // duplicated value requires allow_alias + NewLine(builder, indent + 1).Append("option allow_alias = true;"); + } + + bool haveWrittenZero = false; + // write zero values **first** + foreach (ValueMember member in fieldsArr) + { + if (member.FieldNumber == 0) + { + NewLine(builder, indent + 1).Append(member.Name).Append(" = ").Append(member.FieldNumber).Append(';'); + haveWrittenZero = true; + } + } + if (syntax == ProtoSyntax.Proto3 && !haveWrittenZero) + { + NewLine(builder, indent + 1).Append("ZERO = 0; // proto3 requires a zero value as the first item (it can be named anything)"); + } + // note array is already sorted, so zero would already be first + foreach (ValueMember member in fieldsArr) + { + if (member.FieldNumber == 0) continue; + NewLine(builder, indent + 1).Append(member.Name).Append(" = ").Append(member.FieldNumber).Append(';'); + } + } + NewLine(builder, indent).Append('}'); + } + else + { + NewLine(builder, indent).Append("message ").Append(GetSchemaTypeName()).Append(" {"); + foreach (ValueMember member in fieldsArr) + { + string schemaTypeName; + bool hasOption = false; + if (member.IsMap) + { + member.ResolveMapTypes(out var _, out var keyType, out var valueType); + + var keyTypeName = model.GetSchemaTypeName(keyType, member.MapKeyFormat, false, false, ref imports); + schemaTypeName = model.GetSchemaTypeName(valueType, member.MapKeyFormat, member.AsReference, member.DynamicType, ref imports); + NewLine(builder, indent + 1).Append("map<").Append(keyTypeName).Append(",").Append(schemaTypeName).Append("> ") + .Append(member.Name).Append(" = ").Append(member.FieldNumber).Append(";"); + } + else + { + 
string ordinality = member.ItemType != null ? "repeated " : (syntax == ProtoSyntax.Proto2 ? (member.IsRequired ? "required " : "optional ") : ""); + NewLine(builder, indent + 1).Append(ordinality); + if (member.DataFormat == DataFormat.Group) builder.Append("group "); + schemaTypeName = member.GetSchemaTypeName(true, ref imports); + builder.Append(schemaTypeName).Append(" ") + .Append(member.Name).Append(" = ").Append(member.FieldNumber); + + if (syntax == ProtoSyntax.Proto2 && member.DefaultValue != null && member.IsRequired == false) + { + if (member.DefaultValue is string) + { + AddOption(builder, ref hasOption).Append("default = \"").Append(member.DefaultValue).Append("\""); + } + else if (member.DefaultValue is TimeSpan) + { + // ignore + } + else if (member.DefaultValue is bool) + { // need to be lower case (issue 304) + AddOption(builder, ref hasOption).Append((bool)member.DefaultValue ? "default = true" : "default = false"); + } + else + { + AddOption(builder, ref hasOption).Append("default = ").Append(member.DefaultValue); + } + } + if (CanPack(member.ItemType)) + { + if (syntax == ProtoSyntax.Proto2) + { + if (member.IsPacked) AddOption(builder, ref hasOption).Append("packed = true"); // disabled by default + } + else + { + if (!member.IsPacked) AddOption(builder, ref hasOption).Append("packed = false"); // enabled by default + } + } + if (member.AsReference) + { + imports |= RuntimeTypeModel.CommonImports.Protogen; + AddOption(builder, ref hasOption).Append("(.protobuf_net.fieldopt).asRef = true"); + } + if (member.DynamicType) + { + imports |= RuntimeTypeModel.CommonImports.Protogen; + AddOption(builder, ref hasOption).Append("(.protobuf_net.fieldopt).dynamicType = true"); + } + CloseOption(builder, ref hasOption).Append(';'); + if (syntax != ProtoSyntax.Proto2 && member.DefaultValue != null && !member.IsRequired) + { + if (IsImplicitDefault(member.DefaultValue)) + { + // don't emit; we're good + } + else + { + builder.Append(" // default value could not be applied: ").Append(member.DefaultValue); + } + } + } + if (schemaTypeName == ".bcl.NetObjectProxy" && member.AsReference && !member.DynamicType) // we know what it is; tell the user + { + builder.Append(" // reference-tracked ").Append(member.GetSchemaTypeName(false, ref imports)); + } + } + if (subTypes != null && subTypes.Count != 0) + { + SubType[] subTypeArr = new SubType[subTypes.Count]; + subTypes.CopyTo(subTypeArr, 0); + Array.Sort(subTypeArr, SubType.Comparer.Default); + string[] fieldNames = new string[subTypeArr.Length]; + for(int i = 0; i < subTypeArr.Length;i++) + fieldNames[i] = subTypeArr[i].DerivedType.GetSchemaTypeName(); + + string fieldName = "subtype"; + while (Array.IndexOf(fieldNames, fieldName) >= 0) + fieldName = "_" + fieldName; + + NewLine(builder, indent + 1).Append("oneof ").Append(fieldName).Append(" {"); + for(int i = 0; i < subTypeArr.Length; i++) + { + var subTypeName = fieldNames[i]; + NewLine(builder, indent + 2).Append(subTypeName) + .Append(" ").Append(subTypeName).Append(" = ").Append(subTypeArr[i].FieldNumber).Append(';'); + } + NewLine(builder, indent + 1).Append("}"); + } + NewLine(builder, indent).Append('}'); + } + } + + private static StringBuilder AddOption(StringBuilder builder, ref bool hasOption) + { + if (hasOption) + return builder.Append(", "); + hasOption = true; + return builder.Append(" ["); + } + + private static StringBuilder CloseOption(StringBuilder builder, ref bool hasOption) + { + if (hasOption) + { + hasOption = false; + return builder.Append("]"); + } + return 
builder; + } + + private static bool IsImplicitDefault(object value) + { + try + { + if (value == null) return false; + switch (Helpers.GetTypeCode(value.GetType())) + { + case ProtoTypeCode.Boolean: return ((bool)value) == false; + case ProtoTypeCode.Byte: return ((byte)value) == (byte)0; + case ProtoTypeCode.Char: return ((char)value) == (char)0; + case ProtoTypeCode.DateTime: return ((DateTime)value) == default; + case ProtoTypeCode.Decimal: return ((decimal)value) == 0M; + case ProtoTypeCode.Double: return ((double)value) == (double)0; + case ProtoTypeCode.Int16: return ((short)value) == (short)0; + case ProtoTypeCode.Int32: return ((int)value) == (int)0; + case ProtoTypeCode.Int64: return ((long)value) == (long)0; + case ProtoTypeCode.SByte: return ((sbyte)value) == (sbyte)0; + case ProtoTypeCode.Single: return ((float)value) == (float)0; + case ProtoTypeCode.String: return ((string)value) == ""; + case ProtoTypeCode.TimeSpan: return ((TimeSpan)value) == TimeSpan.Zero; + case ProtoTypeCode.UInt16: return ((ushort)value) == (ushort)0; + case ProtoTypeCode.UInt32: return ((uint)value) == (uint)0; + case ProtoTypeCode.UInt64: return ((ulong)value) == (ulong)0; + } + } + catch { } + return false; + } + + private static bool CanPack(Type type) + { + if (type == null) return false; + switch (Helpers.GetTypeCode(type)) + { + case ProtoTypeCode.Boolean: + case ProtoTypeCode.Byte: + case ProtoTypeCode.Char: + case ProtoTypeCode.Double: + case ProtoTypeCode.Int16: + case ProtoTypeCode.Int32: + case ProtoTypeCode.Int64: + case ProtoTypeCode.SByte: + case ProtoTypeCode.Single: + case ProtoTypeCode.UInt16: + case ProtoTypeCode.UInt32: + case ProtoTypeCode.UInt64: + return true; + } + return false; + } + + /// + /// Apply a shift to all fields (and sub-types) on this type + /// + /// The change in field number to apply + /// The resultant field numbers must still all be considered valid +#if !(NETSTANDARD1_0 || NETSTANDARD1_3 || UAP) + [System.ComponentModel.Browsable(false)] +#endif + [System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Advanced)] + public void ApplyFieldOffset(int offset) + { + if (Helpers.IsEnum(type)) throw new InvalidOperationException("Cannot apply field-offset to an enum"); + if (offset == 0) return; // nothing to do + int opaqueToken = 0; + try + { + model.TakeLock(ref opaqueToken); + ThrowIfFrozen(); + + if (fields != null) + { + foreach(ValueMember field in fields) + AssertValidFieldNumber(field.FieldNumber + offset); + } + if (subTypes != null) + { + foreach (SubType subType in subTypes) + AssertValidFieldNumber(subType.FieldNumber + offset); + } + + // we've checked the ranges are all OK; since we're moving everything, we can't overlap ourselves + // so: we can just move + if (fields != null) + { + foreach (ValueMember field in fields) + field.FieldNumber += offset; + } + if (subTypes != null) + { + foreach (SubType subType in subTypes) + subType.FieldNumber += offset; + } + } + finally + { + model.ReleaseLock(opaqueToken); + } + } + + internal static void AssertValidFieldNumber(int fieldNumber) + { + if (fieldNumber < 1) throw new ArgumentOutOfRangeException(nameof(fieldNumber)); + } + } +} +#endif diff --git a/Runtime/Protobuf-net/Meta/MetaType.cs.meta b/Runtime/Protobuf-net/Meta/MetaType.cs.meta new file mode 100644 index 0000000..edc2cad --- /dev/null +++ b/Runtime/Protobuf-net/Meta/MetaType.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 170c607ac9d3b9346a8f4197e9e4d86a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + 
defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/ProtoSyntax.cs b/Runtime/Protobuf-net/Meta/ProtoSyntax.cs new file mode 100644 index 0000000..ab90d5b --- /dev/null +++ b/Runtime/Protobuf-net/Meta/ProtoSyntax.cs @@ -0,0 +1,17 @@ +namespace ProtoBuf.Meta +{ + /// + /// Indiate the variant of the protobuf .proto DSL syntax to use + /// + public enum ProtoSyntax + { + /// + /// https://developers.google.com/protocol-buffers/docs/proto + /// + Proto2 = 0, + /// + /// https://developers.google.com/protocol-buffers/docs/proto3 + /// + Proto3 = 1, + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/Meta/ProtoSyntax.cs.meta b/Runtime/Protobuf-net/Meta/ProtoSyntax.cs.meta new file mode 100644 index 0000000..2320025 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/ProtoSyntax.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8df2b30e0bc1f274a8170e86c9d08f96 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/RuntimeTypeModel.cs b/Runtime/Protobuf-net/Meta/RuntimeTypeModel.cs new file mode 100644 index 0000000..05dfcf1 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/RuntimeTypeModel.cs @@ -0,0 +1,2036 @@ +#if !NO_RUNTIME +using System; +using System.Collections; +using System.Text; +using System.Reflection; +#if FEAT_COMPILER +using System.Reflection.Emit; +#endif + +using ProtoBuf.Serializers; +using System.Threading; +using System.IO; +using System.Diagnostics; +using System.Runtime.CompilerServices; + +namespace ProtoBuf.Meta +{ + /// + /// Provides protobuf serialization support for a number of types that can be defined at runtime + /// + public sealed class RuntimeTypeModel : TypeModel + { + private ushort options; + private const ushort + OPTIONS_InferTagFromNameDefault = 1, + OPTIONS_IsDefaultModel = 2, + OPTIONS_Frozen = 4, + OPTIONS_AutoAddMissingTypes = 8, +#if FEAT_COMPILER + OPTIONS_AutoCompile = 16, +#endif + OPTIONS_UseImplicitZeroDefaults = 32, + OPTIONS_AllowParseableTypes = 64, + OPTIONS_AutoAddProtoContractTypesOnly = 128, + OPTIONS_IncludeDateTimeKind = 256, + OPTIONS_DoNotInternStrings = 512; + + private bool GetOption(ushort option) + { + return (options & option) == option; + } + + private void SetOption(ushort option, bool value) + { + if (value) options |= option; + else options &= (ushort)~option; + } + + /// + /// Global default that + /// enables/disables automatic tag generation based on the existing name / order + /// of the defined members. See + /// for usage and important warning / explanation. + /// You must set the global default before attempting to serialize/deserialize any + /// impacted type. + /// + public bool InferTagFromNameDefault + { + get { return GetOption(OPTIONS_InferTagFromNameDefault); } + set { SetOption(OPTIONS_InferTagFromNameDefault, value); } + } + + /// + /// Global default that determines whether types are considered serializable + /// if they have [DataContract] / [XmlType]. With this enabled, ONLY + /// types marked as [ProtoContract] are added automatically. 
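A small usage sketch for the global defaults described here; purely illustrative and not part of the change.

using ProtoBuf.Meta;

var model = RuntimeTypeModel.Create();
// only auto-add types carrying [ProtoContract]; ignore [DataContract]/[XmlType]-only types
model.AutoAddProtoContractTypesOnly = true;
// must be set before any affected type is serialized/deserialized
model.InferTagFromNameDefault = true;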
+ /// + public bool AutoAddProtoContractTypesOnly + { + get { return GetOption(OPTIONS_AutoAddProtoContractTypesOnly); } + set { SetOption(OPTIONS_AutoAddProtoContractTypesOnly, value); } + } + + /// + /// Global switch that enables or disables the implicit + /// handling of "zero defaults"; meanning: if no other default is specified, + /// it assumes bools always default to false, integers to zero, etc. + /// + /// If this is disabled, no such assumptions are made and only *explicit* + /// default values are processed. This is enabled by default to + /// preserve similar logic to v1. + /// + public bool UseImplicitZeroDefaults + { + get { return GetOption(OPTIONS_UseImplicitZeroDefaults); } + set + { + if (!value && GetOption(OPTIONS_IsDefaultModel)) + { + throw new InvalidOperationException("UseImplicitZeroDefaults cannot be disabled on the default model"); + } + SetOption(OPTIONS_UseImplicitZeroDefaults, value); + } + } + + /// + /// Global switch that determines whether types with a .ToString() and a Parse(string) + /// should be serialized as strings. + /// + public bool AllowParseableTypes + { + get { return GetOption(OPTIONS_AllowParseableTypes); } + set { SetOption(OPTIONS_AllowParseableTypes, value); } + } + + /// + /// Global switch that determines whether DateTime serialization should include the Kind of the date/time. + /// + public bool IncludeDateTimeKind + { + get { return GetOption(OPTIONS_IncludeDateTimeKind); } + set { SetOption(OPTIONS_IncludeDateTimeKind, value); } + } + + /// + /// Global switch that determines whether a single instance of the same string should be used during deserialization. + /// + /// Note this does not use the global .NET string interner + public bool InternStrings + { + get { return !GetOption(OPTIONS_DoNotInternStrings); } + set { SetOption(OPTIONS_DoNotInternStrings, !value); } + } + + /// + /// Should the Kind be included on date/time values? + /// + protected internal override bool SerializeDateTimeKind() + { + return GetOption(OPTIONS_IncludeDateTimeKind); + } + + private sealed class Singleton + { + private Singleton() { } + internal static readonly RuntimeTypeModel Value = new RuntimeTypeModel(true); + } + + /// + /// The default model, used to support ProtoBuf.Serializer + /// + public static RuntimeTypeModel Default => Singleton.Value; + + /// + /// Returns a sequence of the Type instances that can be + /// processed by this model. 
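To illustrate the switches above with a hedged sketch: note that, per the guard in its setter, UseImplicitZeroDefaults can only be disabled on a model created via Create(), not on the default model.

using ProtoBuf.Meta;

var model = RuntimeTypeModel.Create();
model.UseImplicitZeroDefaults = false; // would throw InvalidOperationException on RuntimeTypeModel.Default
model.AllowParseableTypes = true;      // serialize types exposing ToString()/Parse(string) as strings
model.IncludeDateTimeKind = true;      // preserve the DateTimeKind on the wire
model.InternStrings = false;           // do not reuse a single string instance during deserialization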
+ /// + public IEnumerable GetTypes() => types; + + /// + /// Suggest a .proto definition for the given type + /// + /// The type to generate a .proto definition for, or null to generate a .proto that represents the entire model + /// The .proto definition as a string + /// The .proto syntax to use + public override string GetSchema(Type type, ProtoSyntax syntax) + { + BasicList requiredTypes = new BasicList(); + MetaType primaryType = null; + bool isInbuiltType = false; + if (type == null) + { // generate for the entire model + foreach (MetaType meta in types) + { + MetaType tmp = meta.GetSurrogateOrBaseOrSelf(false); + if (!requiredTypes.Contains(tmp)) + { // ^^^ note that the type might have been added as a descendent + requiredTypes.Add(tmp); + CascadeDependents(requiredTypes, tmp); + } + } + } + else + { + Type tmp = Helpers.GetUnderlyingType(type); + if (tmp != null) type = tmp; + + WireType defaultWireType; + isInbuiltType = (ValueMember.TryGetCoreSerializer(this, DataFormat.Default, type, out defaultWireType, false, false, false, false) != null); + if (!isInbuiltType) + { + //Agenerate just relative to the supplied type + int index = FindOrAddAuto(type, false, false, false); + if (index < 0) throw new ArgumentException("The type specified is not a contract-type", "type"); + + // get the required types + primaryType = ((MetaType)types[index]).GetSurrogateOrBaseOrSelf(false); + requiredTypes.Add(primaryType); + CascadeDependents(requiredTypes, primaryType); + } + } + + // use the provided type's namespace for the "package" + StringBuilder headerBuilder = new StringBuilder(); + string package = null; + + if (!isInbuiltType) + { + IEnumerable typesForNamespace = primaryType == null ? types : requiredTypes; + foreach (MetaType meta in typesForNamespace) + { + if (meta.IsList) continue; + string tmp = meta.Type.Namespace; + if (!string.IsNullOrEmpty(tmp)) + { + if (tmp.StartsWith("System.")) continue; + if (package == null) + { // haven't seen any suggestions yet + package = tmp; + } + else if (package == tmp) + { // that's fine; a repeat of the one we already saw + } + else + { // something else; have confliucting suggestions; abort + package = null; + break; + } + } + } + } + switch (syntax) + { + case ProtoSyntax.Proto2: + headerBuilder.AppendLine(@"syntax = ""proto2"";"); + break; + case ProtoSyntax.Proto3: + headerBuilder.AppendLine(@"syntax = ""proto3"";"); + break; + default: + throw new ArgumentOutOfRangeException(nameof(syntax)); + } + + if (!string.IsNullOrEmpty(package)) + { + headerBuilder.Append("package ").Append(package).Append(';'); + Helpers.AppendLine(headerBuilder); + } + + var imports = CommonImports.None; + StringBuilder bodyBuilder = new StringBuilder(); + // sort them by schema-name + MetaType[] metaTypesArr = new MetaType[requiredTypes.Count]; + requiredTypes.CopyTo(metaTypesArr, 0); + Array.Sort(metaTypesArr, MetaType.Comparer.Default); + + // write the messages + if (isInbuiltType) + { + Helpers.AppendLine(bodyBuilder).Append("message ").Append(type.Name).Append(" {"); + MetaType.NewLine(bodyBuilder, 1).Append(syntax == ProtoSyntax.Proto2 ? 
"optional " : "").Append(GetSchemaTypeName(type, DataFormat.Default, false, false, ref imports)) + .Append(" value = 1;"); + Helpers.AppendLine(bodyBuilder).Append('}'); + } + else + { + for (int i = 0; i < metaTypesArr.Length; i++) + { + MetaType tmp = metaTypesArr[i]; + if (tmp.IsList && tmp != primaryType) continue; + tmp.WriteSchema(bodyBuilder, 0, ref imports, syntax); + } + } + if ((imports & CommonImports.Bcl) != 0) + { + headerBuilder.Append("import \"protobuf-net/bcl.proto\"; // schema for protobuf-net's handling of core .NET types"); + Helpers.AppendLine(headerBuilder); + } + if ((imports & CommonImports.Protogen) != 0) + { + headerBuilder.Append("import \"protobuf-net/protogen.proto\"; // custom protobuf-net options"); + Helpers.AppendLine(headerBuilder); + } + if ((imports & CommonImports.Timestamp) != 0) + { + headerBuilder.Append("import \"google/protobuf/timestamp.proto\";"); + Helpers.AppendLine(headerBuilder); + } + if ((imports & CommonImports.Duration) != 0) + { + headerBuilder.Append("import \"google/protobuf/duration.proto\";"); + Helpers.AppendLine(headerBuilder); + } + return Helpers.AppendLine(headerBuilder.Append(bodyBuilder)).ToString(); + } + [Flags] + internal enum CommonImports + { + None = 0, + Bcl = 1, + Timestamp = 2, + Duration = 4, + Protogen = 8 + } + private void CascadeDependents(BasicList list, MetaType metaType) + { + MetaType tmp; + if (metaType.IsList) + { + Type itemType = TypeModel.GetListItemType(this, metaType.Type); + TryGetCoreSerializer(list, itemType); + } + else + { + if (metaType.IsAutoTuple) + { + MemberInfo[] mapping; + if (MetaType.ResolveTupleConstructor(metaType.Type, out mapping) != null) + { + for (int i = 0; i < mapping.Length; i++) + { + Type type = null; + if (mapping[i] is PropertyInfo) type = ((PropertyInfo)mapping[i]).PropertyType; + else if (mapping[i] is FieldInfo) type = ((FieldInfo)mapping[i]).FieldType; + TryGetCoreSerializer(list, type); + } + } + } + else + { + foreach (ValueMember member in metaType.Fields) + { + Type type = member.ItemType; + if (member.IsMap) + { + member.ResolveMapTypes(out _, out _, out type); // don't need key-type + } + if (type == null) type = member.MemberType; + TryGetCoreSerializer(list, type); + } + } + foreach (var genericArgument in metaType.GetAllGenericArguments()) + { + TryGetCoreSerializer(list, genericArgument); + } + if (metaType.HasSubtypes) + { + foreach (SubType subType in metaType.GetSubtypes()) + { + tmp = subType.DerivedType.GetSurrogateOrSelf(); // note: exclude base-types! 
+ if (!list.Contains(tmp)) + { + list.Add(tmp); + CascadeDependents(list, tmp); + } + } + } + tmp = metaType.BaseType; + if (tmp != null) tmp = tmp.GetSurrogateOrSelf(); // note: already walking base-types; exclude base + if (tmp != null && !list.Contains(tmp)) + { + list.Add(tmp); + CascadeDependents(list, tmp); + } + } + } + + private void TryGetCoreSerializer(BasicList list, Type itemType) + { + var coreSerializer = ValueMember.TryGetCoreSerializer(this, DataFormat.Default, itemType, out _, false, false, false, false); + if (coreSerializer != null) + { + return; + } + int index = FindOrAddAuto(itemType, false, false, false); + if (index < 0) + { + return; + } + var temp = ((MetaType)types[index]).GetSurrogateOrBaseOrSelf(false); + if (list.Contains(temp)) + { + return; + } + // could perhaps also implement as a queue, but this should work OK for sane models + list.Add(temp); + CascadeDependents(list, temp); + } + +#if !NO_RUNTIME + /// + /// Creates a new runtime model, to which the caller + /// can add support for a range of types. A model + /// can be used "as is", or can be compiled for + /// optimal performance. + /// + /// not used currently; this is for compatibility with v3 +#pragma warning disable IDE0060 // Remove unused parameter + public static RuntimeTypeModel Create(string name = null) +#pragma warning restore IDE0060 // Remove unused parameter + { + return new RuntimeTypeModel(false); + } +#endif + + private RuntimeTypeModel(bool isDefault) + { + AutoAddMissingTypes = true; + UseImplicitZeroDefaults = true; + SetOption(OPTIONS_IsDefaultModel, isDefault); +#if FEAT_COMPILER && !DEBUG + try + { + AutoCompile = EnableAutoCompile(); + } + catch { } // this is all kinds of brittle on things like UWP +#endif + } + +#if FEAT_COMPILER + [MethodImpl(MethodImplOptions.NoInlining)] + internal static bool EnableAutoCompile() + { + try + { + var dm = new DynamicMethod("CheckCompilerAvailable", typeof(bool), new Type[] { typeof(int) }); + var il = dm.GetILGenerator(); + il.Emit(OpCodes.Ldarg_0); + il.Emit(OpCodes.Ldc_I4, 42); + il.Emit(OpCodes.Ceq); + il.Emit(OpCodes.Ret); + var func = (Predicate)dm.CreateDelegate(typeof(Predicate)); + return func(42); + } + catch (Exception ex) + { + Debug.WriteLine(ex); + return false; + } + } +#endif + + /// + /// Obtains the MetaType associated with a given Type for the current model, + /// allowing additional configuration. + /// + public MetaType this[Type type] { get { return (MetaType)types[FindOrAddAuto(type, true, false, false)]; } } + + internal MetaType FindWithoutAdd(Type type) + { + // this list is thread-safe for reading + foreach (MetaType metaType in types) + { + if (metaType.Type == type) + { + if (metaType.Pending) WaitOnLock(metaType); + return metaType; + } + } + // if that failed, check for a proxy + Type underlyingType = ResolveProxies(type); + return underlyingType == null ? 
null : FindWithoutAdd(underlyingType); + } + + static readonly BasicList.MatchPredicate + MetaTypeFinder = new BasicList.MatchPredicate(MetaTypeFinderImpl), + BasicTypeFinder = new BasicList.MatchPredicate(BasicTypeFinderImpl); + + static bool MetaTypeFinderImpl(object value, object ctx) + { + return ((MetaType)value).Type == (Type)ctx; + } + + static bool BasicTypeFinderImpl(object value, object ctx) + { + return ((BasicType)value).Type == (Type)ctx; + } + + private void WaitOnLock(MetaType type) + { + int opaqueToken = 0; + try + { + TakeLock(ref opaqueToken); + } + finally + { + ReleaseLock(opaqueToken); + } + } + + BasicList basicTypes = new BasicList(); + + sealed class BasicType + { + private readonly Type type; + public Type Type => type; + private readonly IProtoSerializer serializer; + public IProtoSerializer Serializer => serializer; + + public BasicType(Type type, IProtoSerializer serializer) + { + this.type = type; + this.serializer = serializer; + } + } + internal IProtoSerializer TryGetBasicTypeSerializer(Type type) + { + int idx = basicTypes.IndexOf(BasicTypeFinder, type); + + if (idx >= 0) return ((BasicType)basicTypes[idx]).Serializer; + + lock (basicTypes) + { // don't need a full model lock for this + + // double-checked + idx = basicTypes.IndexOf(BasicTypeFinder, type); + if (idx >= 0) return ((BasicType)basicTypes[idx]).Serializer; + + MetaType.AttributeFamily family = MetaType.GetContractFamily(this, type, null); + IProtoSerializer ser = family == MetaType.AttributeFamily.None + ? ValueMember.TryGetCoreSerializer(this, DataFormat.Default, type, out WireType defaultWireType, false, false, false, false) + : null; + + if (ser != null) basicTypes.Add(new BasicType(type, ser)); + return ser; + } + } + + internal int FindOrAddAuto(Type type, bool demand, bool addWithContractOnly, bool addEvenIfAutoDisabled) + { + int key = types.IndexOf(MetaTypeFinder, type); + MetaType metaType; + + // the fast happy path: meta-types we've already seen + if (key >= 0) + { + metaType = (MetaType)types[key]; + if (metaType.Pending) + { + WaitOnLock(metaType); + } + return key; + } + + // the fast fail path: types that will never have a meta-type + bool shouldAdd = AutoAddMissingTypes || addEvenIfAutoDisabled; + + if (!Helpers.IsEnum(type) && TryGetBasicTypeSerializer(type) != null) + { + if (shouldAdd && !addWithContractOnly) throw MetaType.InbuiltType(type); + return -1; // this will never be a meta-type + } + + // otherwise: we don't yet know + + // check for proxy types + Type underlyingType = ResolveProxies(type); + if (underlyingType != null && underlyingType != type) + { + key = types.IndexOf(MetaTypeFinder, underlyingType); + type = underlyingType; // if new added, make it reflect the underlying type + } + + if (key < 0) + { + int opaqueToken = 0; + Type origType = type; + bool weAdded = false; + try + { + TakeLock(ref opaqueToken); + // try to recognise a few familiar patterns... 
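The indexer shown above is the usual entry point for per-type configuration; a minimal sketch, with assumed type and member names.

using ProtoBuf.Meta;

var model = RuntimeTypeModel.Create();
// this[Type] adds the type on first access (FindOrAddAuto) and returns its MetaType
MetaType meta = model[typeof(Invoice)];
meta.UseConstructor = false;      // create instances without invoking the parameterless constructor
meta.Add(5, "Reference");         // assumes a "Reference" field/property exists on the type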
+ if ((metaType = RecogniseCommonTypes(type)) == null) + { // otherwise, check if it is a contract + MetaType.AttributeFamily family = MetaType.GetContractFamily(this, type, null); + if (family == MetaType.AttributeFamily.AutoTuple) + { + shouldAdd = addEvenIfAutoDisabled = true; // always add basic tuples, such as KeyValuePair + } + + if (!shouldAdd || ( + !Helpers.IsEnum(type) && addWithContractOnly && family == MetaType.AttributeFamily.None) + ) + { + if (demand) ThrowUnexpectedType(type); + return key; + } + metaType = Create(type); + } + + metaType.Pending = true; + + // double-checked + int winner = types.IndexOf(MetaTypeFinder, type); + if (winner < 0) + { + ThrowIfFrozen(); + key = types.Add(metaType); + weAdded = true; + } + else + { + key = winner; + } + if (weAdded) + { + metaType.ApplyDefaultBehaviour(); + metaType.Pending = false; + } + } + finally + { + ReleaseLock(opaqueToken); + if (weAdded) + { + ResetKeyCache(); + } + } + } + return key; + } + + private MetaType RecogniseCommonTypes(Type type) + { + // if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(System.Collections.Generic.KeyValuePair<,>)) + // { + // MetaType mt = new MetaType(this, type); + + // Type surrogate = typeof (KeyValuePairSurrogate<,>).MakeGenericType(type.GetGenericArguments()); + + // mt.SetSurrogate(surrogate); + // mt.IncludeSerializerMethod = false; + // mt.Freeze(); + + // MetaType surrogateMeta = (MetaType)types[FindOrAddAuto(surrogate, true, true, true)]; // this forcibly adds it if needed + // if(surrogateMeta.IncludeSerializerMethod) + // { // don't blindly set - it might be frozen + // surrogateMeta.IncludeSerializerMethod = false; + // } + // surrogateMeta.Freeze(); + // return mt; + // } + return null; + } + private MetaType Create(Type type) + { + ThrowIfFrozen(); + return new MetaType(this, type, defaultFactory); + } + + /// + /// Adds support for an additional type in this model, optionally + /// applying inbuilt patterns. If the type is already known to the + /// model, the existing type is returned **without** applying + /// any additional behaviour. + /// + /// Inbuilt patterns include: + /// [ProtoContract]/[ProtoMember(n)] + /// [DataContract]/[DataMember(Order=n)] + /// [XmlType]/[XmlElement(Order=n)] + /// [On{Des|S}erializ{ing|ed}] + /// ShouldSerialize*/*Specified + /// + /// The type to be supported + /// Whether to apply the inbuilt configuration patterns (via attributes etc), or + /// just add the type with no additional configuration (the type must then be manually configured). + /// The MetaType representing this type, allowing + /// further configuration. 
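Roughly how the two applyDefaultBehaviour modes described above differ in practice; illustrative only, with an assumed Person type.

using ProtoBuf.Meta;

public class Person              // note: no [ProtoContract] attribute
{
    public int Id { get; set; }
    public string Name { get; set; }
}

static class Registration
{
    public static void Configure(RuntimeTypeModel model)
    {
        // applyDefaultBehaviour: false -> nothing is inferred; members must be mapped manually
        var meta = model.Add(typeof(Person), applyDefaultBehaviour: false);
        meta.Add(1, "Id");
        meta.Add(2, "Name");
        // with applyDefaultBehaviour: true the model would instead look for
        // [ProtoContract]/[DataContract]/[XmlType] patterns and map members automatically
    }
}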
+ public MetaType Add(Type type, bool applyDefaultBehaviour) + { + if (type == null) throw new ArgumentNullException("type"); + MetaType newType = FindWithoutAdd(type); + if (newType != null) return newType; // return existing + int opaqueToken = 0; + +#if COREFX || PROFILE259 + TypeInfo typeInfo = IntrospectionExtensions.GetTypeInfo(type); + if (typeInfo.IsInterface && MetaType.ienumerable.IsAssignableFrom(typeInfo) +#else + if (type.IsInterface && MapType(MetaType.ienumerable).IsAssignableFrom(type) +#endif + && GetListItemType(this, type) == null) + { + throw new ArgumentException("IEnumerable[] data cannot be used as a meta-type unless an Add method can be resolved"); + } + try + { + newType = RecogniseCommonTypes(type); + if (newType != null) + { + if (!applyDefaultBehaviour) + { + throw new ArgumentException( + "Default behaviour must be observed for certain types with special handling; " + type.FullName, + "applyDefaultBehaviour"); + } + // we should assume that type is fully configured, though; no need to re-run: + applyDefaultBehaviour = false; + } + if (newType == null) newType = Create(type); + newType.Pending = true; + TakeLock(ref opaqueToken); + // double checked + if (FindWithoutAdd(type) != null) throw new ArgumentException("Duplicate type", "type"); + ThrowIfFrozen(); + types.Add(newType); + if (applyDefaultBehaviour) { newType.ApplyDefaultBehaviour(); } + newType.Pending = false; + } + finally + { + ReleaseLock(opaqueToken); + ResetKeyCache(); + } + + return newType; + } + +#if FEAT_COMPILER + /// + /// Should serializers be compiled on demand? It may be useful + /// to disable this for debugging purposes. + /// + public bool AutoCompile + { + get { return GetOption(OPTIONS_AutoCompile); } + set { SetOption(OPTIONS_AutoCompile, value); } + } +#endif + /// + /// Should support for unexpected types be added automatically? + /// If false, an exception is thrown when unexpected types + /// are encountered. + /// + public bool AutoAddMissingTypes + { + get { return GetOption(OPTIONS_AutoAddMissingTypes); } + set + { + if (!value && GetOption(OPTIONS_IsDefaultModel)) + { + throw new InvalidOperationException("The default model must allow missing types"); + } + ThrowIfFrozen(); + SetOption(OPTIONS_AutoAddMissingTypes, value); + } + } + /// + /// Verifies that the model is still open to changes; if not, an exception is thrown + /// + private void ThrowIfFrozen() + { + if (GetOption(OPTIONS_Frozen)) throw new InvalidOperationException("The model cannot be changed once frozen"); + } + + /// + /// Prevents further changes to this model + /// + public void Freeze() + { + if (GetOption(OPTIONS_IsDefaultModel)) throw new InvalidOperationException("The default model cannot be frozen"); + SetOption(OPTIONS_Frozen, true); + } + + private readonly BasicList types = new BasicList(); + + /// + /// Provides the key that represents a given type in the current model. 
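A hedged sketch of how the auto-add and freezing switches above are typically combined, reusing the assumed Person type from the earlier sketch.

using ProtoBuf.Meta;

var model = RuntimeTypeModel.Create();
model.AutoAddMissingTypes = false;   // unknown types now raise an exception instead of being added
model.Add(typeof(Person), true);     // explicit adds still work with auto-add disabled
model.Freeze();                      // further changes throw ("The model cannot be changed once frozen")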
+ /// + protected override int GetKeyImpl(Type type) + { + return GetKey(type, false, true); + } + + internal int GetKey(Type type, bool demand, bool getBaseKey) + { + Helpers.DebugAssert(type != null); + try + { + int typeIndex = FindOrAddAuto(type, demand, true, false); + if (typeIndex >= 0) + { + MetaType mt = (MetaType)types[typeIndex]; + if (getBaseKey) + { + mt = MetaType.GetRootType(mt); + typeIndex = FindOrAddAuto(mt.Type, true, true, false); + } + } + return typeIndex; + } + catch (NotSupportedException) + { + throw; // re-surface "as-is" + } + catch (Exception ex) + { + if (ex.Message.IndexOf(type.FullName) >= 0) throw; // already enough info + throw new ProtoException(ex.Message + " (" + type.FullName + ")", ex); + } + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream. + /// + /// Represents the type (including inheritance) to consider. + /// The existing instance to be serialized (cannot be null). + /// The destination stream to write to. + protected internal override void Serialize(int key, object value, ProtoWriter dest) + { + //Helpers.DebugWriteLine("Serialize", value); + ((MetaType)types[key]).Serializer.Write(value, dest); + } + + /// + /// Applies a protocol-buffer stream to an existing instance (which may be null). + /// + /// Represents the type (including inheritance) to consider. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + protected internal override object Deserialize(int key, object value, ProtoReader source) + { + //Helpers.DebugWriteLine("Deserialize", value); + IProtoSerializer ser = ((MetaType)types[key]).Serializer; + if (value == null && Helpers.IsValueType(ser.ExpectedType)) + { + if (ser.RequiresOldValue) value = Activator.CreateInstance(ser.ExpectedType); + return ser.Read(value, source); + } + else + { + return ser.Read(value, source); + } + } + +#if FEAT_COMPILER + // this is used by some unit-tests; do not remove + internal Compiler.ProtoSerializer GetSerializer(IProtoSerializer serializer, bool compiled) + { + if (serializer == null) throw new ArgumentNullException("serializer"); +#if FEAT_COMPILER + if (compiled) return Compiler.CompilerContext.BuildSerializer(serializer, this); +#endif + return new Compiler.ProtoSerializer(serializer.Write); + } + + /// + /// Compiles the serializers individually; this is *not* a full + /// standalone compile, but can significantly boost performance + /// while allowing additional types to be added. 
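+ // Illustrative usage sketch (not part of the upstream source): a common pattern is to
+ // register every message type during startup and then compile in place once, so the
+ // reflection-based metadata inspection never runs concurrently at runtime:
+ //
+ //   foreach (var t in messageTypes) model.Add(t, true);   // messageTypes: hypothetical list
+ //   model.CompileInPlace();                               // pre-builds the serializers added so far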
+ /// + /// An in-place compile can access non-public types / members + public void CompileInPlace() + { + foreach (MetaType type in types) + { + type.CompileInPlace(); + } + } + +#endif + //internal override IProtoSerializer GetTypeSerializer(Type type) + //{ // this list is thread-safe for reading + // .Serializer; + //} + //internal override IProtoSerializer GetTypeSerializer(int key) + //{ // this list is thread-safe for reading + // MetaType type = (MetaType)types.TryGet(key); + // if (type != null) return type.Serializer; + // throw new KeyNotFoundException(); + + //} + +#if FEAT_COMPILER + private void BuildAllSerializers() + { + // note that types.Count may increase during this operation, as some serializers + // bring other types into play + for (int i = 0; i < types.Count; i++) + { + // the primary purpose of this is to force the creation of the Serializer + MetaType mt = (MetaType)types[i]; + if (mt.Serializer == null) + throw new InvalidOperationException("No serializer available for " + mt.Type.Name); + } + } + + internal sealed class SerializerPair : IComparable + { + int IComparable.CompareTo(object obj) + { + if (obj == null) throw new ArgumentException("obj"); + SerializerPair other = (SerializerPair)obj; + + // we want to bunch all the items with the same base-type together, but we need the items with a + // different base **first**. + if (this.BaseKey == this.MetaKey) + { + if (other.BaseKey == other.MetaKey) + { // neither is a subclass + return this.MetaKey.CompareTo(other.MetaKey); + } + else + { // "other" (only) is involved in inheritance; "other" should be first + return 1; + } + } + else + { + if (other.BaseKey == other.MetaKey) + { // "this" (only) is involved in inheritance; "this" should be first + return -1; + } + else + { // both are involved in inheritance + int result = this.BaseKey.CompareTo(other.BaseKey); + if (result == 0) result = this.MetaKey.CompareTo(other.MetaKey); + return result; + } + } + } + public readonly int MetaKey, BaseKey; + public readonly MetaType Type; + public readonly MethodBuilder Serialize, Deserialize; + public readonly ILGenerator SerializeBody, DeserializeBody; + public SerializerPair(int metaKey, int baseKey, MetaType type, MethodBuilder serialize, MethodBuilder deserialize, + ILGenerator serializeBody, ILGenerator deserializeBody) + { + this.MetaKey = metaKey; + this.BaseKey = baseKey; + this.Serialize = serialize; + this.Deserialize = deserialize; + this.SerializeBody = serializeBody; + this.DeserializeBody = deserializeBody; + this.Type = type; + } + } + + /// + /// Fully compiles the current model into a static-compiled model instance + /// + /// A full compilation is restricted to accessing public types / members + /// An instance of the newly created compiled type-model + public TypeModel Compile() + { + CompilerOptions options = new CompilerOptions(); + return Compile(options); + } + + static ILGenerator Override(TypeBuilder type, string name) + { + MethodInfo baseMethod = type.BaseType.GetMethod(name, BindingFlags.NonPublic | BindingFlags.Instance); + + ParameterInfo[] parameters = baseMethod.GetParameters(); + Type[] paramTypes = new Type[parameters.Length]; + for (int i = 0; i < paramTypes.Length; i++) + { + paramTypes[i] = parameters[i].ParameterType; + } + MethodBuilder newMethod = type.DefineMethod(baseMethod.Name, + (baseMethod.Attributes & ~MethodAttributes.Abstract) | MethodAttributes.Final, baseMethod.CallingConvention, baseMethod.ReturnType, paramTypes); + ILGenerator il = newMethod.GetILGenerator(); + 
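+ // DefineMethodOverride below binds newMethod as the sealed, concrete override of the
+ // abstract TypeModel method; callers of Override(...) then fill the returned ILGenerator
+ // with the generated method body.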
type.DefineMethodOverride(newMethod, baseMethod); + return il; + } + + /// + /// Represents configuration options for compiling a model to + /// a standalone assembly. + /// + public sealed class CompilerOptions + { + /// + /// Import framework options from an existing type + /// + public void SetFrameworkOptions(MetaType from) + { + if (from == null) throw new ArgumentNullException("from"); + AttributeMap[] attribs = AttributeMap.Create(from.Model, Helpers.GetAssembly(from.Type)); + foreach (AttributeMap attrib in attribs) + { + if (attrib.AttributeType.FullName == "System.Runtime.Versioning.TargetFrameworkAttribute") + { + object tmp; + if (attrib.TryGet("FrameworkName", out tmp)) TargetFrameworkName = (string)tmp; + if (attrib.TryGet("FrameworkDisplayName", out tmp)) TargetFrameworkDisplayName = (string)tmp; + break; + } + } + } + + private string targetFrameworkName, targetFrameworkDisplayName, typeName, outputPath, imageRuntimeVersion; + private int metaDataVersion; + /// + /// The TargetFrameworkAttribute FrameworkName value to burn into the generated assembly + /// + public string TargetFrameworkName { get { return targetFrameworkName; } set { targetFrameworkName = value; } } + + /// + /// The TargetFrameworkAttribute FrameworkDisplayName value to burn into the generated assembly + /// + public string TargetFrameworkDisplayName { get { return targetFrameworkDisplayName; } set { targetFrameworkDisplayName = value; } } + /// + /// The name of the TypeModel class to create + /// + public string TypeName { get { return typeName; } set { typeName = value; } } + +#if COREFX + internal const string NoPersistence = "Assembly persistence not supported on this runtime"; +#endif + /// + /// The path for the new dll + /// +#if COREFX + [Obsolete(NoPersistence)] +#endif + public string OutputPath { get { return outputPath; } set { outputPath = value; } } + /// + /// The runtime version for the generated assembly + /// + public string ImageRuntimeVersion { get { return imageRuntimeVersion; } set { imageRuntimeVersion = value; } } + /// + /// The runtime version for the generated assembly + /// + public int MetaDataVersion { get { return metaDataVersion; } set { metaDataVersion = value; } } + + + private Accessibility accessibility = Accessibility.Public; + /// + /// The acecssibility of the generated serializer + /// + public Accessibility Accessibility { get { return accessibility; } set { accessibility = value; } } + } + + /// + /// Type accessibility + /// + public enum Accessibility + { + /// + /// Available to all callers + /// + Public, + /// + /// Available to all callers in the same assembly, or assemblies specified via [InternalsVisibleTo(...)] + /// + Internal + } + +#if !COREFX + /// + /// Fully compiles the current model into a static-compiled serialization dll + /// (the serialization dll still requires protobuf-net for support services). + /// + /// A full compilation is restricted to accessing public types / members + /// The name of the TypeModel class to create + /// The path for the new dll + /// An instance of the newly created compiled type-model + public TypeModel Compile(string name, string path) + { + CompilerOptions options = new CompilerOptions(); + options.TypeName = name; + options.OutputPath = path; + return Compile(options); + } +#endif + /// + /// Fully compiles the current model into a static-compiled serialization dll + /// (the serialization dll still requires protobuf-net for support services). 
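+ // Illustrative usage sketch (not part of the upstream source), assuming an editor-side
+ // build step that emits a standalone serializer assembly; "GuruSerializer" is a
+ // hypothetical name:
+ //
+ //   var opt = new RuntimeTypeModel.CompilerOptions { TypeName = "GuruSerializer", OutputPath = "GuruSerializer.dll" };
+ //   TypeModel compiled = model.Compile(opt);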
+ /// + /// A full compilation is restricted to accessing public types / members + /// An instance of the newly created compiled type-model + public TypeModel Compile(CompilerOptions options) + { + if (options == null) throw new ArgumentNullException("options"); + string typeName = options.TypeName; +#pragma warning disable 0618 + string path = options.OutputPath; +#pragma warning restore 0618 + BuildAllSerializers(); + Freeze(); + bool save = !string.IsNullOrEmpty(path); + if (string.IsNullOrEmpty(typeName)) + { + if (save) throw new ArgumentNullException("typeName"); + typeName = Guid.NewGuid().ToString(); + } + + + string assemblyName, moduleName; + if (path == null) + { + assemblyName = typeName; + moduleName = assemblyName + ".dll"; + } + else + { + assemblyName = new System.IO.FileInfo(System.IO.Path.GetFileNameWithoutExtension(path)).Name; + moduleName = assemblyName + System.IO.Path.GetExtension(path); + } + +#if COREFX + AssemblyName an = new AssemblyName(); + an.Name = assemblyName; + AssemblyBuilder asm = AssemblyBuilder.DefineDynamicAssembly(an, + AssemblyBuilderAccess.Run); + ModuleBuilder module = asm.DefineDynamicModule(moduleName); +#else + AssemblyName an = new AssemblyName(); + an.Name = assemblyName; + AssemblyBuilder asm = AppDomain.CurrentDomain.DefineDynamicAssembly(an, + (save ? AssemblyBuilderAccess.RunAndSave : AssemblyBuilderAccess.Run) + ); + ModuleBuilder module = save ? asm.DefineDynamicModule(moduleName, path) + : asm.DefineDynamicModule(moduleName); +#endif + + WriteAssemblyAttributes(options, assemblyName, asm); + + TypeBuilder type = WriteBasicTypeModel(options, typeName, module); + + int index; + bool hasInheritance; + SerializerPair[] methodPairs; + Compiler.CompilerContext.ILVersion ilVersion; + WriteSerializers(options, assemblyName, type, out index, out hasInheritance, out methodPairs, out ilVersion); + + ILGenerator il; + int knownTypesCategory; + FieldBuilder knownTypes; + Type knownTypesLookupType; + WriteGetKeyImpl(type, hasInheritance, methodPairs, ilVersion, assemblyName, out il, out knownTypesCategory, out knownTypes, out knownTypesLookupType); + + // trivial flags + il = Override(type, "SerializeDateTimeKind"); + il.Emit(IncludeDateTimeKind ? 
OpCodes.Ldc_I4_1 : OpCodes.Ldc_I4_0); + il.Emit(OpCodes.Ret); + // end: trivial flags + + Compiler.CompilerContext ctx = WriteSerializeDeserialize(assemblyName, type, methodPairs, ilVersion, ref il); + + WriteConstructors(type, ref index, methodPairs, ref il, knownTypesCategory, knownTypes, knownTypesLookupType, ctx); + + +#if COREFX + Type finalType = type.CreateTypeInfo().AsType(); +#else + Type finalType = type.CreateType(); +#endif + if (!string.IsNullOrEmpty(path)) + { +#if COREFX + throw new NotSupportedException(CompilerOptions.NoPersistence); +#else + try + { + asm.Save(path); + } + catch (IOException ex) + { + // advertise the file info + throw new IOException(path + ", " + ex.Message, ex); + } + Helpers.DebugWriteLine("Wrote dll:" + path); +#endif + } + return (TypeModel)Activator.CreateInstance(finalType); + } + + private void WriteConstructors(TypeBuilder type, ref int index, SerializerPair[] methodPairs, ref ILGenerator il, int knownTypesCategory, FieldBuilder knownTypes, Type knownTypesLookupType, Compiler.CompilerContext ctx) + { + type.DefineDefaultConstructor(MethodAttributes.Public); + il = type.DefineTypeInitializer().GetILGenerator(); + switch (knownTypesCategory) + { + case KnownTypes_Array: + { + Compiler.CompilerContext.LoadValue(il, types.Count); + il.Emit(OpCodes.Newarr, ctx.MapType(typeof(System.Type))); + index = 0; + foreach (SerializerPair pair in methodPairs) + { + il.Emit(OpCodes.Dup); + Compiler.CompilerContext.LoadValue(il, index); + il.Emit(OpCodes.Ldtoken, pair.Type.Type); + il.EmitCall(OpCodes.Call, ctx.MapType(typeof(System.Type)).GetMethod("GetTypeFromHandle"), null); + il.Emit(OpCodes.Stelem_Ref); + index++; + } + il.Emit(OpCodes.Stsfld, knownTypes); + il.Emit(OpCodes.Ret); + } + break; + case KnownTypes_Dictionary: + { + Compiler.CompilerContext.LoadValue(il, types.Count); + //LocalBuilder loc = il.DeclareLocal(knownTypesLookupType); + il.Emit(OpCodes.Newobj, knownTypesLookupType.GetConstructor(new Type[] { MapType(typeof(int)) })); + il.Emit(OpCodes.Stsfld, knownTypes); + int typeIndex = 0; + foreach (SerializerPair pair in methodPairs) + { + il.Emit(OpCodes.Ldsfld, knownTypes); + il.Emit(OpCodes.Ldtoken, pair.Type.Type); + il.EmitCall(OpCodes.Call, ctx.MapType(typeof(System.Type)).GetMethod("GetTypeFromHandle"), null); + int keyIndex = typeIndex++, lastKey = pair.BaseKey; + if (lastKey != pair.MetaKey) // not a base-type; need to give the index of the base-type + { + keyIndex = -1; // assume epic fail + for (int j = 0; j < methodPairs.Length; j++) + { + if (methodPairs[j].BaseKey == lastKey && methodPairs[j].MetaKey == lastKey) + { + keyIndex = j; + break; + } + } + } + Compiler.CompilerContext.LoadValue(il, keyIndex); + il.EmitCall(OpCodes.Callvirt, knownTypesLookupType.GetMethod("Add", new Type[] { MapType(typeof(System.Type)), MapType(typeof(int)) }), null); + } + il.Emit(OpCodes.Ret); + } + break; + case KnownTypes_Hashtable: + { + Compiler.CompilerContext.LoadValue(il, types.Count); + il.Emit(OpCodes.Newobj, knownTypesLookupType.GetConstructor(new Type[] { MapType(typeof(int)) })); + il.Emit(OpCodes.Stsfld, knownTypes); + int typeIndex = 0; + foreach (SerializerPair pair in methodPairs) + { + il.Emit(OpCodes.Ldsfld, knownTypes); + il.Emit(OpCodes.Ldtoken, pair.Type.Type); + il.EmitCall(OpCodes.Call, ctx.MapType(typeof(System.Type)).GetMethod("GetTypeFromHandle"), null); + int keyIndex = typeIndex++, lastKey = pair.BaseKey; + if (lastKey != pair.MetaKey) // not a base-type; need to give the index of the base-type + { + keyIndex = -1; // assume 
epic fail + for (int j = 0; j < methodPairs.Length; j++) + { + if (methodPairs[j].BaseKey == lastKey && methodPairs[j].MetaKey == lastKey) + { + keyIndex = j; + break; + } + } + } + Compiler.CompilerContext.LoadValue(il, keyIndex); + il.Emit(OpCodes.Box, MapType(typeof(int))); + il.EmitCall(OpCodes.Callvirt, knownTypesLookupType.GetMethod("Add", new Type[] { MapType(typeof(object)), MapType(typeof(object)) }), null); + } + il.Emit(OpCodes.Ret); + } + break; + default: + throw new InvalidOperationException(); + } + } + + private Compiler.CompilerContext WriteSerializeDeserialize(string assemblyName, TypeBuilder type, SerializerPair[] methodPairs, Compiler.CompilerContext.ILVersion ilVersion, ref ILGenerator il) + { + il = Override(type, "Serialize"); + Compiler.CompilerContext ctx = new Compiler.CompilerContext(il, false, true, methodPairs, this, ilVersion, assemblyName, MapType(typeof(object)), "Serialize " + type.Name); + // arg0 = this, arg1 = key, arg2=obj, arg3=dest + Compiler.CodeLabel[] jumpTable = new Compiler.CodeLabel[types.Count]; + for (int i = 0; i < jumpTable.Length; i++) + { + jumpTable[i] = ctx.DefineLabel(); + } + il.Emit(OpCodes.Ldarg_1); + ctx.Switch(jumpTable); + ctx.Return(); + for (int i = 0; i < jumpTable.Length; i++) + { + SerializerPair pair = methodPairs[i]; + ctx.MarkLabel(jumpTable[i]); + il.Emit(OpCodes.Ldarg_2); + ctx.CastFromObject(pair.Type.Type); + il.Emit(OpCodes.Ldarg_3); + il.EmitCall(OpCodes.Call, pair.Serialize, null); + ctx.Return(); + } + + il = Override(type, "Deserialize"); + ctx = new Compiler.CompilerContext(il, false, false, methodPairs, this, ilVersion, assemblyName, MapType(typeof(object)), "Deserialize " + type.Name); + // arg0 = this, arg1 = key, arg2=obj, arg3=source + for (int i = 0; i < jumpTable.Length; i++) + { + jumpTable[i] = ctx.DefineLabel(); + } + il.Emit(OpCodes.Ldarg_1); + ctx.Switch(jumpTable); + ctx.LoadNullRef(); + ctx.Return(); + for (int i = 0; i < jumpTable.Length; i++) + { + SerializerPair pair = methodPairs[i]; + ctx.MarkLabel(jumpTable[i]); + Type keyType = pair.Type.Type; + if (Helpers.IsValueType(keyType)) + { + il.Emit(OpCodes.Ldarg_2); + il.Emit(OpCodes.Ldarg_3); + il.EmitCall(OpCodes.Call, EmitBoxedSerializer(type, i, keyType, methodPairs, this, ilVersion, assemblyName), null); + ctx.Return(); + } + else + { + il.Emit(OpCodes.Ldarg_2); + ctx.CastFromObject(keyType); + il.Emit(OpCodes.Ldarg_3); + il.EmitCall(OpCodes.Call, pair.Deserialize, null); + ctx.Return(); + } + } + return ctx; + } + + private const int KnownTypes_Array = 1, KnownTypes_Dictionary = 2, KnownTypes_Hashtable = 3, KnownTypes_ArrayCutoff = 20; + private void WriteGetKeyImpl(TypeBuilder type, bool hasInheritance, SerializerPair[] methodPairs, Compiler.CompilerContext.ILVersion ilVersion, string assemblyName, out ILGenerator il, out int knownTypesCategory, out FieldBuilder knownTypes, out Type knownTypesLookupType) + { + + il = Override(type, "GetKeyImpl"); + Compiler.CompilerContext ctx = new Compiler.CompilerContext(il, false, false, methodPairs, this, ilVersion, assemblyName, MapType(typeof(System.Type), true), "GetKeyImpl"); + + + if (types.Count <= KnownTypes_ArrayCutoff) + { + knownTypesCategory = KnownTypes_Array; + knownTypesLookupType = MapType(typeof(System.Type[]), true); + } + else + { + knownTypesLookupType = MapType(typeof(System.Collections.Generic.Dictionary), false); + +#if !COREFX + if (knownTypesLookupType == null) + { + knownTypesLookupType = MapType(typeof(Hashtable), true); + knownTypesCategory = KnownTypes_Hashtable; + } + else 
+#endif + { + knownTypesCategory = KnownTypes_Dictionary; + } + } + knownTypes = type.DefineField("knownTypes", knownTypesLookupType, FieldAttributes.Private | FieldAttributes.InitOnly | FieldAttributes.Static); + + switch (knownTypesCategory) + { + case KnownTypes_Array: + { + il.Emit(OpCodes.Ldsfld, knownTypes); + il.Emit(OpCodes.Ldarg_1); + // note that Array.IndexOf is not supported under CF + il.EmitCall(OpCodes.Callvirt, MapType(typeof(IList)).GetMethod( + "IndexOf", new Type[] { MapType(typeof(object)) }), null); + if (hasInheritance) + { + il.DeclareLocal(MapType(typeof(int))); // loc-0 + il.Emit(OpCodes.Dup); + il.Emit(OpCodes.Stloc_0); + + BasicList getKeyLabels = new BasicList(); + int lastKey = -1; + for (int i = 0; i < methodPairs.Length; i++) + { + if (methodPairs[i].MetaKey == methodPairs[i].BaseKey) break; + if (lastKey == methodPairs[i].BaseKey) + { // add the last label again + getKeyLabels.Add(getKeyLabels[getKeyLabels.Count - 1]); + } + else + { // add a new unique label + getKeyLabels.Add(ctx.DefineLabel()); + lastKey = methodPairs[i].BaseKey; + } + } + Compiler.CodeLabel[] subtypeLabels = new Compiler.CodeLabel[getKeyLabels.Count]; + getKeyLabels.CopyTo(subtypeLabels, 0); + + ctx.Switch(subtypeLabels); + il.Emit(OpCodes.Ldloc_0); // not a sub-type; use the original value + il.Emit(OpCodes.Ret); + + lastKey = -1; + // now output the different branches per sub-type (not derived type) + for (int i = subtypeLabels.Length - 1; i >= 0; i--) + { + if (lastKey != methodPairs[i].BaseKey) + { + lastKey = methodPairs[i].BaseKey; + // find the actual base-index for this base-key (i.e. the index of + // the base-type) + int keyIndex = -1; + for (int j = subtypeLabels.Length; j < methodPairs.Length; j++) + { + if (methodPairs[j].BaseKey == lastKey && methodPairs[j].MetaKey == lastKey) + { + keyIndex = j; + break; + } + } + ctx.MarkLabel(subtypeLabels[i]); + Compiler.CompilerContext.LoadValue(il, keyIndex); + il.Emit(OpCodes.Ret); + } + } + } + else + { + il.Emit(OpCodes.Ret); + } + } + break; + case KnownTypes_Dictionary: + { + LocalBuilder result = il.DeclareLocal(MapType(typeof(int))); + Label otherwise = il.DefineLabel(); + il.Emit(OpCodes.Ldsfld, knownTypes); + il.Emit(OpCodes.Ldarg_1); + il.Emit(OpCodes.Ldloca_S, result); + il.EmitCall(OpCodes.Callvirt, knownTypesLookupType.GetMethod("TryGetValue", BindingFlags.Instance | BindingFlags.Public), null); + il.Emit(OpCodes.Brfalse_S, otherwise); + il.Emit(OpCodes.Ldloc_S, result); + il.Emit(OpCodes.Ret); + il.MarkLabel(otherwise); + il.Emit(OpCodes.Ldc_I4_M1); + il.Emit(OpCodes.Ret); + } + break; + case KnownTypes_Hashtable: + { + Label otherwise = il.DefineLabel(); + il.Emit(OpCodes.Ldsfld, knownTypes); + il.Emit(OpCodes.Ldarg_1); + il.EmitCall(OpCodes.Callvirt, knownTypesLookupType.GetProperty("Item").GetGetMethod(), null); + il.Emit(OpCodes.Dup); + il.Emit(OpCodes.Brfalse_S, otherwise); + if (ilVersion == Compiler.CompilerContext.ILVersion.Net1) + { + il.Emit(OpCodes.Unbox, MapType(typeof(int))); + il.Emit(OpCodes.Ldobj, MapType(typeof(int))); + } + else + { + il.Emit(OpCodes.Unbox_Any, MapType(typeof(int))); + } + il.Emit(OpCodes.Ret); + il.MarkLabel(otherwise); + il.Emit(OpCodes.Pop); + il.Emit(OpCodes.Ldc_I4_M1); + il.Emit(OpCodes.Ret); + } + break; + default: + throw new InvalidOperationException(); + } + } + + private void WriteSerializers(CompilerOptions options, string assemblyName, TypeBuilder type, out int index, out bool hasInheritance, out SerializerPair[] methodPairs, out Compiler.CompilerContext.ILVersion ilVersion) 
+ { + Compiler.CompilerContext ctx; + + index = 0; + hasInheritance = false; + methodPairs = new SerializerPair[types.Count]; + foreach (MetaType metaType in types) + { + MethodBuilder writeMethod = type.DefineMethod("Write" +#if DEBUG + + metaType.Type.Name +#endif +, + MethodAttributes.Private | MethodAttributes.Static, CallingConventions.Standard, + MapType(typeof(void)), new Type[] { metaType.Type, MapType(typeof(ProtoWriter)) }); + + MethodBuilder readMethod = type.DefineMethod("Read" +#if DEBUG + + metaType.Type.Name +#endif +, + MethodAttributes.Private | MethodAttributes.Static, CallingConventions.Standard, + metaType.Type, new Type[] { metaType.Type, MapType(typeof(ProtoReader)) }); + + SerializerPair pair = new SerializerPair( + GetKey(metaType.Type, true, false), GetKey(metaType.Type, true, true), metaType, + writeMethod, readMethod, writeMethod.GetILGenerator(), readMethod.GetILGenerator()); + methodPairs[index++] = pair; + if (pair.MetaKey != pair.BaseKey) hasInheritance = true; + } + + if (hasInheritance) + { + Array.Sort(methodPairs); + } + + ilVersion = Compiler.CompilerContext.ILVersion.Net2; + if (options.MetaDataVersion == 0x10000) + { + ilVersion = Compiler.CompilerContext.ILVersion.Net1; // old-school! + } + for (index = 0; index < methodPairs.Length; index++) + { + SerializerPair pair = methodPairs[index]; + ctx = new Compiler.CompilerContext(pair.SerializeBody, true, true, methodPairs, this, ilVersion, assemblyName, pair.Type.Type, "SerializeImpl " + pair.Type.Type.Name); + MemberInfo returnType = pair.Deserialize.ReturnType +#if COREFX + .GetTypeInfo() +#endif + ; + ctx.CheckAccessibility(ref returnType); + pair.Type.Serializer.EmitWrite(ctx, ctx.InputValue); + ctx.Return(); + + ctx = new Compiler.CompilerContext(pair.DeserializeBody, true, false, methodPairs, this, ilVersion, assemblyName, pair.Type.Type, "DeserializeImpl " + pair.Type.Type.Name); + pair.Type.Serializer.EmitRead(ctx, ctx.InputValue); + if (!pair.Type.Serializer.ReturnsValue) + { + ctx.LoadValue(ctx.InputValue); + } + ctx.Return(); + } + } + + private TypeBuilder WriteBasicTypeModel(CompilerOptions options, string typeName, ModuleBuilder module) + { + Type baseType = MapType(typeof(TypeModel)); +#if COREFX + TypeAttributes typeAttributes = (baseType.GetTypeInfo().Attributes & ~TypeAttributes.Abstract) | TypeAttributes.Sealed; +#else + TypeAttributes typeAttributes = (baseType.Attributes & ~TypeAttributes.Abstract) | TypeAttributes.Sealed; +#endif + if (options.Accessibility == Accessibility.Internal) + { + typeAttributes &= ~TypeAttributes.Public; + } + + TypeBuilder type = module.DefineType(typeName, typeAttributes, baseType); + return type; + } + + private void WriteAssemblyAttributes(CompilerOptions options, string assemblyName, AssemblyBuilder asm) + { + if (!string.IsNullOrEmpty(options.TargetFrameworkName)) + { + // get [TargetFramework] from mscorlib/equivalent and burn into the new assembly + Type versionAttribType = null; + try + { // this is best-endeavours only + versionAttribType = GetType("System.Runtime.Versioning.TargetFrameworkAttribute", Helpers.GetAssembly(MapType(typeof(string)))); + } + catch { /* don't stress */ } + if (versionAttribType != null) + { + PropertyInfo[] props; + object[] propValues; + if (string.IsNullOrEmpty(options.TargetFrameworkDisplayName)) + { + props = new PropertyInfo[0]; + propValues = new object[0]; + } + else + { + props = new PropertyInfo[1] { versionAttribType.GetProperty("FrameworkDisplayName") }; + propValues = new object[1] { 
options.TargetFrameworkDisplayName }; + } + CustomAttributeBuilder builder = new CustomAttributeBuilder( + versionAttribType.GetConstructor(new Type[] { MapType(typeof(string)) }), + new object[] { options.TargetFrameworkName }, + props, + propValues); + asm.SetCustomAttribute(builder); + } + } + + // copy assembly:InternalsVisibleTo + Type internalsVisibleToAttribType = null; + + try + { + internalsVisibleToAttribType = MapType(typeof(System.Runtime.CompilerServices.InternalsVisibleToAttribute)); + } + catch { /* best endeavors only */ } + + if (internalsVisibleToAttribType != null) + { + BasicList internalAssemblies = new BasicList(), consideredAssemblies = new BasicList(); + foreach (MetaType metaType in types) + { + Assembly assembly = Helpers.GetAssembly(metaType.Type); + if (consideredAssemblies.IndexOfReference(assembly) >= 0) continue; + consideredAssemblies.Add(assembly); + + AttributeMap[] assemblyAttribsMap = AttributeMap.Create(this, assembly); + for (int i = 0; i < assemblyAttribsMap.Length; i++) + { + + if (assemblyAttribsMap[i].AttributeType != internalsVisibleToAttribType) continue; + + object privelegedAssemblyObj; + assemblyAttribsMap[i].TryGet("AssemblyName", out privelegedAssemblyObj); + string privelegedAssemblyName = privelegedAssemblyObj as string; + if (privelegedAssemblyName == assemblyName || string.IsNullOrEmpty(privelegedAssemblyName)) continue; // ignore + + if (internalAssemblies.IndexOfString(privelegedAssemblyName) >= 0) continue; // seen it before + internalAssemblies.Add(privelegedAssemblyName); + + CustomAttributeBuilder builder = new CustomAttributeBuilder( + internalsVisibleToAttribType.GetConstructor(new Type[] { MapType(typeof(string)) }), + new object[] { privelegedAssemblyName }); + asm.SetCustomAttribute(builder); + } + } + } + } + + private static MethodBuilder EmitBoxedSerializer(TypeBuilder type, int i, Type valueType, SerializerPair[] methodPairs, TypeModel model, Compiler.CompilerContext.ILVersion ilVersion, string assemblyName) + { + MethodInfo dedicated = methodPairs[i].Deserialize; + MethodBuilder boxedSerializer = type.DefineMethod("_" + i.ToString(), MethodAttributes.Static, CallingConventions.Standard, + model.MapType(typeof(object)), new Type[] { model.MapType(typeof(object)), model.MapType(typeof(ProtoReader)) }); + Compiler.CompilerContext ctx = new Compiler.CompilerContext(boxedSerializer.GetILGenerator(), true, false, methodPairs, model, ilVersion, assemblyName, model.MapType(typeof(object)), "BoxedSerializer " + valueType.Name); + ctx.LoadValue(ctx.InputValue); + Compiler.CodeLabel @null = ctx.DefineLabel(); + ctx.BranchIfFalse(@null, true); + + Type mappedValueType = valueType; + ctx.LoadValue(ctx.InputValue); + ctx.CastFromObject(mappedValueType); + ctx.LoadReaderWriter(); + ctx.EmitCall(dedicated); + ctx.CastToObject(mappedValueType); + ctx.Return(); + + ctx.MarkLabel(@null); + using (Compiler.Local typedVal = new Compiler.Local(ctx, mappedValueType)) + { + // create a new valueType + ctx.LoadAddress(typedVal, mappedValueType); + ctx.EmitCtor(mappedValueType); + ctx.LoadValue(typedVal); + ctx.LoadReaderWriter(); + ctx.EmitCall(dedicated); + ctx.CastToObject(mappedValueType); + ctx.Return(); + } + return boxedSerializer; + } + +#endif + //internal bool IsDefined(Type type, int fieldNumber) + //{ + // return FindWithoutAdd(type).IsDefined(fieldNumber); + //} + + // note that this is used by some of the unit tests + internal bool IsPrepared(Type type) + { + MetaType meta = FindWithoutAdd(type); + return meta != null && 
meta.IsPrepared(); + } + + internal EnumSerializer.EnumPair[] GetEnumMap(Type type) + { + int index = FindOrAddAuto(type, false, false, false); + return index < 0 ? null : ((MetaType)types[index]).GetEnumMap(); + } + + private int metadataTimeoutMilliseconds = 5000; + /// + /// The amount of time to wait if there are concurrent metadata access operations + /// + public int MetadataTimeoutMilliseconds + { + get { return metadataTimeoutMilliseconds; } + set + { + if (value <= 0) throw new ArgumentOutOfRangeException("MetadataTimeoutMilliseconds"); + metadataTimeoutMilliseconds = value; + } + } + +#if DEBUG + int lockCount; + /// + /// Gets how many times a model lock was taken + /// + public int LockCount { get { return lockCount; } } +#endif + internal void TakeLock(ref int opaqueToken) + { + const string message = "Timeout while inspecting metadata; this may indicate a deadlock. This can often be avoided by preparing necessary serializers during application initialization, rather than allowing multiple threads to perform the initial metadata inspection; please also see the LockContended event"; + opaqueToken = 0; +#if PORTABLE + if(!Monitor.TryEnter(types, metadataTimeoutMilliseconds)) throw new TimeoutException(message); + opaqueToken = Interlocked.CompareExchange(ref contentionCounter, 0, 0); // just fetch current value (starts at 1) +#elif CF2 || CF35 + int remaining = metadataTimeoutMilliseconds; + bool lockTaken; + do { + lockTaken = Monitor.TryEnter(types); + if(!lockTaken) + { + if(remaining <= 0) throw new TimeoutException(message); + remaining -= 50; + Thread.Sleep(50); + } + } while(!lockTaken); + opaqueToken = Interlocked.CompareExchange(ref contentionCounter, 0, 0); // just fetch current value (starts at 1) +#else + if (Monitor.TryEnter(types, metadataTimeoutMilliseconds)) + { + opaqueToken = GetContention(); // just fetch current value (starts at 1) + } + else + { + AddContention(); + + throw new TimeoutException(message); + } +#endif + +#if DEBUG // note that here, through all code-paths: we have the lock + lockCount++; +#endif + } + + private int contentionCounter = 1; +#if PLAT_NO_INTERLOCKED + private readonly object contentionLock = new object(); +#endif + private int GetContention() + { +#if PLAT_NO_INTERLOCKED + lock(contentionLock) + { + return contentionCounter; + } +#else + return Interlocked.CompareExchange(ref contentionCounter, 0, 0); +#endif + } + private void AddContention() + { +#if PLAT_NO_INTERLOCKED + lock(contentionLock) + { + contentionCounter++; + } +#else + Interlocked.Increment(ref contentionCounter); +#endif + } + + internal void ReleaseLock(int opaqueToken) + { + if (opaqueToken != 0) + { + Monitor.Exit(types); + if (opaqueToken != GetContention()) // contention-count changes since we looked! + { + LockContentedEventHandler handler = LockContended; + if (handler != null) + { + // not hugely elegant, but this is such a far-corner-case that it doesn't need to be slick - I'll settle for cross-platform + string stackTrace; + try + { + throw new ProtoException(); + } + catch (Exception ex) + { + stackTrace = ex.StackTrace; + } + + handler(this, new LockContentedEventArgs(stackTrace)); + } + } + } + } + /// + /// If a lock-contention is detected, this event signals the *owner* of the lock responsible for the blockage, indicating + /// what caused the problem; this is only raised if the lock-owning code successfully completes. 
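+ // Illustrative usage sketch (not part of the upstream source): the timeout and the
+ // contention event above can be combined for diagnostics, e.g. in a Unity project:
+ //
+ //   model.MetadataTimeoutMilliseconds = 10000;
+ //   model.LockContended += (s, e) => UnityEngine.Debug.LogWarning("protobuf-net lock contention:\n" + e.OwnerStackTrace);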
+ /// + public event LockContentedEventHandler LockContended; + + internal void ResolveListTypes(Type type, ref Type itemType, ref Type defaultType) + { + if (type == null) return; + if (Helpers.GetTypeCode(type) != ProtoTypeCode.Unknown) return; // don't try this[type] for inbuilts + + // handle arrays + if (type.IsArray) + { + if (type.GetArrayRank() != 1) + { + throw new NotSupportedException("Multi-dimension arrays are supported"); + } + itemType = type.GetElementType(); + if (itemType == MapType(typeof(byte))) + { + defaultType = itemType = null; + } + else + { + defaultType = type; + } + } + else + { + // if not an array, first check it isn't explicitly opted out + if (this[type].IgnoreListHandling) return; + } + + // handle lists + if (itemType == null) { itemType = TypeModel.GetListItemType(this, type); } + + // check for nested data (not allowed) + if (itemType != null) + { + Type nestedItemType = null, nestedDefaultType = null; + ResolveListTypes(itemType, ref nestedItemType, ref nestedDefaultType); + if (nestedItemType != null) + { + throw TypeModel.CreateNestedListsNotSupported(type); + } + } + + if (itemType != null && defaultType == null) + { +#if COREFX || PROFILE259 + TypeInfo typeInfo = IntrospectionExtensions.GetTypeInfo(type); + if (typeInfo.IsClass && !typeInfo.IsAbstract && Helpers.GetConstructor(typeInfo, Helpers.EmptyTypes, true) != null) +#else + if (type.IsClass && !type.IsAbstract && Helpers.GetConstructor(type, Helpers.EmptyTypes, true) != null) +#endif + { + defaultType = type; + } + if (defaultType == null) + { +#if COREFX || PROFILE259 + if (typeInfo.IsInterface) +#else + if (type.IsInterface) +#endif + { + + Type[] genArgs; +#if COREFX || PROFILE259 + if (typeInfo.IsGenericType && typeInfo.GetGenericTypeDefinition() == typeof(System.Collections.Generic.IDictionary<,>) + && itemType == typeof(System.Collections.Generic.KeyValuePair<,>).MakeGenericType(genArgs = typeInfo.GenericTypeArguments)) +#else + if (type.IsGenericType && type.GetGenericTypeDefinition() == MapType(typeof(System.Collections.Generic.IDictionary<,>)) + && itemType == MapType(typeof(System.Collections.Generic.KeyValuePair<,>)).MakeGenericType(genArgs = type.GetGenericArguments())) +#endif + { + defaultType = MapType(typeof(System.Collections.Generic.Dictionary<,>)).MakeGenericType(genArgs); + } + else + { + defaultType = MapType(typeof(System.Collections.Generic.List<>)).MakeGenericType(itemType); + } + } + } + // verify that the default type is appropriate + if (defaultType != null && !Helpers.IsAssignableFrom(type, defaultType)) { defaultType = null; } + } + } + + internal string GetSchemaTypeName(Type effectiveType, DataFormat dataFormat, bool asReference, bool dynamicType, ref CommonImports imports) + { + Type tmp = Helpers.GetUnderlyingType(effectiveType); + if (tmp != null) effectiveType = tmp; + + if (effectiveType == this.MapType(typeof(byte[]))) return "bytes"; + + WireType wireType; + IProtoSerializer ser = ValueMember.TryGetCoreSerializer(this, dataFormat, effectiveType, out wireType, false, false, false, false); + if (ser == null) + { // model type + if (asReference || dynamicType) + { + imports |= CommonImports.Bcl; + return ".bcl.NetObjectProxy"; + } + return this[effectiveType].GetSurrogateOrBaseOrSelf(true).GetSchemaTypeName(); + } + else + { + if (ser is ParseableSerializer) + { + if (asReference) imports |= CommonImports.Bcl; + return asReference ? 
".bcl.NetObjectProxy" : "string"; + } + + switch (Helpers.GetTypeCode(effectiveType)) + { + case ProtoTypeCode.Boolean: return "bool"; + case ProtoTypeCode.Single: return "float"; + case ProtoTypeCode.Double: return "double"; + case ProtoTypeCode.String: + if (asReference) imports |= CommonImports.Bcl; + return asReference ? ".bcl.NetObjectProxy" : "string"; + case ProtoTypeCode.Byte: + case ProtoTypeCode.Char: + case ProtoTypeCode.UInt16: + case ProtoTypeCode.UInt32: + switch (dataFormat) + { + case DataFormat.FixedSize: return "fixed32"; + default: return "uint32"; + } + case ProtoTypeCode.SByte: + case ProtoTypeCode.Int16: + case ProtoTypeCode.Int32: + switch (dataFormat) + { + case DataFormat.ZigZag: return "sint32"; + case DataFormat.FixedSize: return "sfixed32"; + default: return "int32"; + } + case ProtoTypeCode.UInt64: + switch (dataFormat) + { + case DataFormat.FixedSize: return "fixed64"; + default: return "uint64"; + } + case ProtoTypeCode.Int64: + switch (dataFormat) + { + case DataFormat.ZigZag: return "sint64"; + case DataFormat.FixedSize: return "sfixed64"; + default: return "int64"; + } + case ProtoTypeCode.DateTime: + switch (dataFormat) + { + case DataFormat.FixedSize: return "sint64"; + case DataFormat.WellKnown: + imports |= CommonImports.Timestamp; + return ".google.protobuf.Timestamp"; + default: + imports |= CommonImports.Bcl; + return ".bcl.DateTime"; + } + case ProtoTypeCode.TimeSpan: + switch (dataFormat) + { + case DataFormat.FixedSize: return "sint64"; + case DataFormat.WellKnown: + imports |= CommonImports.Duration; + return ".google.protobuf.Duration"; + default: + imports |= CommonImports.Bcl; + return ".bcl.TimeSpan"; + } + case ProtoTypeCode.Decimal: imports |= CommonImports.Bcl; return ".bcl.Decimal"; + case ProtoTypeCode.Guid: imports |= CommonImports.Bcl; return ".bcl.Guid"; + case ProtoTypeCode.Type: return "string"; + default: throw new NotSupportedException("No .proto map found for: " + effectiveType.FullName); + } + } + + } + + /// + /// Designate a factory-method to use to create instances of any type; note that this only affect types seen by the serializer *after* setting the factory. + /// + public void SetDefaultFactory(MethodInfo methodInfo) + { + VerifyFactory(methodInfo, null); + defaultFactory = methodInfo; + } + private MethodInfo defaultFactory; + + internal void VerifyFactory(MethodInfo factory, Type type) + { + if (factory != null) + { + if (type != null && Helpers.IsValueType(type)) throw new InvalidOperationException(); + if (!factory.IsStatic) throw new ArgumentException("A factory-method must be static", "factory"); + if ((type != null && factory.ReturnType != type) && factory.ReturnType != MapType(typeof(object))) throw new ArgumentException("The factory-method must return object" + (type == null ? "" : (" or " + type.FullName)), "factory"); + + if (!CallbackSet.CheckCallbackParameters(this, factory)) throw new ArgumentException("Invalid factory signature in " + factory.DeclaringType.FullName + "." 
+ factory.Name, "factory"); + } + } + + /// + /// Raised before a type is auto-configured; this allows the auto-configuration to be electively suppressed + /// + /// This callback should be fast and not involve complex external calls, as it may block the model + public event EventHandler BeforeApplyDefaultBehaviour; + + /// + /// Raised after a type is auto-configured; this allows additional external customizations + /// + /// This callback should be fast and not involve complex external calls, as it may block the model + public event EventHandler AfterApplyDefaultBehaviour; + + internal static void OnBeforeApplyDefaultBehaviour(MetaType metaType, ref TypeAddedEventArgs args) + => OnApplyDefaultBehaviour((metaType?.Model as RuntimeTypeModel)?.BeforeApplyDefaultBehaviour, metaType, ref args); + + internal static void OnAfterApplyDefaultBehaviour(MetaType metaType, ref TypeAddedEventArgs args) + => OnApplyDefaultBehaviour((metaType?.Model as RuntimeTypeModel)?.AfterApplyDefaultBehaviour, metaType, ref args); + + private static void OnApplyDefaultBehaviour( + EventHandler handler, MetaType metaType, ref TypeAddedEventArgs args) + { + if (handler != null) + { + if (args == null) args = new TypeAddedEventArgs(metaType); + handler(metaType.Model, args); + } + } + } + + /// + /// Contains the stack-trace of the owning code when a lock-contention scenario is detected + /// + public sealed class LockContentedEventArgs : EventArgs + { + private readonly string ownerStackTrace; + internal LockContentedEventArgs(string ownerStackTrace) + { + this.ownerStackTrace = ownerStackTrace; + } + + /// + /// The stack-trace of the code that owned the lock when a lock-contention scenario occurred + /// + public string OwnerStackTrace => ownerStackTrace; + } + /// + /// Event-type that is raised when a lock-contention scenario is detected + /// + public delegate void LockContentedEventHandler(object sender, LockContentedEventArgs args); +} +#endif diff --git a/Runtime/Protobuf-net/Meta/RuntimeTypeModel.cs.meta b/Runtime/Protobuf-net/Meta/RuntimeTypeModel.cs.meta new file mode 100644 index 0000000..231a028 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/RuntimeTypeModel.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 0e4440bfa9e92f84d81d48e6c5b0022e +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/SubType.cs b/Runtime/Protobuf-net/Meta/SubType.cs new file mode 100644 index 0000000..72c8126 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/SubType.cs @@ -0,0 +1,97 @@ +#if !NO_RUNTIME +using System; +using System.Collections.Generic; +using ProtoBuf.Serializers; + +namespace ProtoBuf.Meta +{ + /// + /// Represents an inherited type in a type hierarchy. + /// + public sealed class SubType + { + internal sealed class Comparer : System.Collections.IComparer, IComparer + { + public static readonly Comparer Default = new Comparer(); + + public int Compare(object x, object y) + { + return Compare(x as SubType, y as SubType); + } + + public int Compare(SubType x, SubType y) + { + if (ReferenceEquals(x, y)) return 0; + if (x == null) return -1; + if (y == null) return 1; + + return x.FieldNumber.CompareTo(y.FieldNumber); + } + } + + private int _fieldNumber; + + /// + /// The field-number that is used to encapsulate the data (as a nested + /// message) for the derived dype. 
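+ // Illustrative usage sketch (not part of the upstream source): SubType entries are
+ // normally created indirectly, either via [ProtoInclude] on the base class or by
+ // registering inheritance on the runtime model; BaseMsg/DerivedMsg are hypothetical:
+ //
+ //   model[typeof(BaseMsg)].AddSubType(100, typeof(DerivedMsg));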
+ /// + public int FieldNumber + { + get => _fieldNumber; + internal set + { + if (_fieldNumber != value) + { + MetaType.AssertValidFieldNumber(value); + ThrowIfFrozen(); + _fieldNumber = value; + } + } + } + + private void ThrowIfFrozen() + { + if (serializer != null) throw new InvalidOperationException("The type cannot be changed once a serializer has been generated"); + } + + + /// + /// The sub-type to be considered. + /// + public MetaType DerivedType => derivedType; + private readonly MetaType derivedType; + + /// + /// Creates a new SubType instance. + /// + /// The field-number that is used to encapsulate the data (as a nested + /// message) for the derived dype. + /// The sub-type to be considered. + /// Specific encoding style to use; in particular, Grouped can be used to avoid buffering, but is not the default. + public SubType(int fieldNumber, MetaType derivedType, DataFormat format) + { + if (derivedType == null) throw new ArgumentNullException(nameof(derivedType)); + if (fieldNumber <= 0) throw new ArgumentOutOfRangeException(nameof(fieldNumber)); + _fieldNumber = fieldNumber; + this.derivedType = derivedType; + this.dataFormat = format; + } + + private readonly DataFormat dataFormat; + + private IProtoSerializer serializer; + + internal IProtoSerializer Serializer => serializer ?? (serializer = BuildSerializer()); + + private IProtoSerializer BuildSerializer() + { + // note the caller here is MetaType.BuildSerializer, which already has the sync-lock + WireType wireType = WireType.String; + if(dataFormat == DataFormat.Group) wireType = WireType.StartGroup; // only one exception + + IProtoSerializer ser = new SubItemSerializer(derivedType.Type, derivedType.GetKey(false, false), derivedType, false); + return new TagDecorator(_fieldNumber, wireType, false, ser); + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Meta/SubType.cs.meta b/Runtime/Protobuf-net/Meta/SubType.cs.meta new file mode 100644 index 0000000..fb7fe45 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/SubType.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a2912d37917b74846bdcffe3daa174d2 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/TypeAddedEventArgs.cs b/Runtime/Protobuf-net/Meta/TypeAddedEventArgs.cs new file mode 100644 index 0000000..399c638 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/TypeAddedEventArgs.cs @@ -0,0 +1,33 @@ +using System; + +namespace ProtoBuf.Meta +{ + /// + /// Event data associated with new types being added to a model + /// + public sealed class TypeAddedEventArgs : EventArgs + { + internal TypeAddedEventArgs(MetaType metaType) + { + MetaType = metaType; + ApplyDefaultBehaviour = true; + } + + /// + /// Whether or not to apply the default mapping behavior + /// + public bool ApplyDefaultBehaviour { get; set; } + /// + /// The configuration of the type being added + /// + public MetaType MetaType { get; } + /// + /// The type that was added to the model + /// + public Type Type => MetaType.Type; + /// + /// The model that is being changed + /// + public RuntimeTypeModel Model => MetaType.Model as RuntimeTypeModel; + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/Meta/TypeAddedEventArgs.cs.meta b/Runtime/Protobuf-net/Meta/TypeAddedEventArgs.cs.meta new file mode 100644 index 0000000..8ac9b8f --- /dev/null +++ b/Runtime/Protobuf-net/Meta/TypeAddedEventArgs.cs.meta 
@@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 1500030a10d2168408f75fe907ce0568 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/TypeFormatEventArgs.cs b/Runtime/Protobuf-net/Meta/TypeFormatEventArgs.cs new file mode 100644 index 0000000..3db0999 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/TypeFormatEventArgs.cs @@ -0,0 +1,64 @@ +using System; + +namespace ProtoBuf.Meta +{ + /// + /// Event arguments needed to perform type-formatting functions; this could be resolving a Type to a string suitable for serialization, or could + /// be requesting a Type from a string. If no changes are made, a default implementation will be used (from the assembly-qualified names). + /// + public class TypeFormatEventArgs : EventArgs + { + private Type type; + private string formattedName; + private readonly bool typeFixed; + /// + /// The type involved in this map; if this is initially null, a Type is expected to be provided for the string in FormattedName. + /// + public Type Type + { + get { return type; } + set + { + if (type != value) + { + if (typeFixed) throw new InvalidOperationException("The type is fixed and cannot be changed"); + type = value; + } + } + } + + /// + /// The formatted-name involved in this map; if this is initially null, a formatted-name is expected from the type in Type. + /// + public string FormattedName + { + get { return formattedName; } + set + { + if (formattedName != value) + { + if (!typeFixed) throw new InvalidOperationException("The formatted-name is fixed and cannot be changed"); + formattedName = value; + } + } + } + + internal TypeFormatEventArgs(string formattedName) + { + if (string.IsNullOrEmpty(formattedName)) throw new ArgumentNullException("formattedName"); + this.formattedName = formattedName; + // typeFixed = false; <== implicit + } + + internal TypeFormatEventArgs(Type type) + { + this.type = type ?? throw new ArgumentNullException(nameof(type)); + typeFixed = true; + } + } + + /// + /// Delegate type used to perform type-formatting functions; the sender originates as the type-model. 
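+ // Illustrative usage sketch (not part of the upstream source): this delegate backs the
+ // model's dynamic type-name hooks; assuming the usual DynamicTypeFormatting event is
+ // exposed by the vendored TypeModel, a custom mapping looks roughly like:
+ //
+ //   model.DynamicTypeFormatting += (sender, a) =>
+ //   {
+ //       if (a.Type != null) a.FormattedName = a.Type.FullName;   // serializing: type is fixed
+ //       else a.Type = Type.GetType(a.FormattedName);             // deserializing: name is fixed
+ //   };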
+ /// + public delegate void TypeFormatEventHandler(object sender, TypeFormatEventArgs args); +} diff --git a/Runtime/Protobuf-net/Meta/TypeFormatEventArgs.cs.meta b/Runtime/Protobuf-net/Meta/TypeFormatEventArgs.cs.meta new file mode 100644 index 0000000..a21c2ab --- /dev/null +++ b/Runtime/Protobuf-net/Meta/TypeFormatEventArgs.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: d27afe6e96660d1418a49cf374e84ad0 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/TypeModel.InputOutput.cs b/Runtime/Protobuf-net/Meta/TypeModel.InputOutput.cs new file mode 100644 index 0000000..9b023a6 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/TypeModel.InputOutput.cs @@ -0,0 +1,45 @@ +using System; +using System.IO; + +namespace ProtoBuf.Meta +{ + partial class TypeModel : + IProtoInput, + IProtoInput>, + IProtoInput, + IProtoOutput + { + static SerializationContext CreateContext(object userState) + { + if (userState == null) + return SerializationContext.Default; + if (userState is SerializationContext ctx) + return ctx; + + var obj = new SerializationContext { Context = userState }; + obj.Freeze(); + return obj; + } + T IProtoInput.Deserialize(Stream source, T value, object userState) + => (T)Deserialize(source, value, typeof(T), CreateContext(userState)); + + T IProtoInput>.Deserialize(ArraySegment source, T value, object userState) + { + using (var ms = new MemoryStream(source.Array, source.Offset, source.Count)) + { + return (T)Deserialize(ms, value, typeof(T), CreateContext(userState)); + } + } + + T IProtoInput.Deserialize(byte[] source, T value, object userState) + { + using (var ms = new MemoryStream(source)) + { + return (T)Deserialize(ms, value, typeof(T), CreateContext(userState)); + } + } + + void IProtoOutput.Serialize(Stream destination, T value, object userState) + => Serialize(destination, value, CreateContext(userState)); + } +} diff --git a/Runtime/Protobuf-net/Meta/TypeModel.InputOutput.cs.meta b/Runtime/Protobuf-net/Meta/TypeModel.InputOutput.cs.meta new file mode 100644 index 0000000..80015e5 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/TypeModel.InputOutput.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: d683bc55be70e8e46824012108beb15f +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/TypeModel.cs b/Runtime/Protobuf-net/Meta/TypeModel.cs new file mode 100644 index 0000000..1867cf2 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/TypeModel.cs @@ -0,0 +1,1696 @@ +using System; +using System.IO; + +using System.Collections; +using System.Collections.Generic; +using System.Reflection; + +namespace ProtoBuf.Meta +{ + /// + /// Provides protobuf serialization support for a number of types + /// + public abstract partial class TypeModel + { +#if COREFX + internal TypeInfo MapType(TypeInfo type) + { + return type; + } +#endif + + /// + /// Should the Kind be included on date/time values? 
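+ // Note: RuntimeTypeModel surfaces this switch as its IncludeDateTimeKind property (see the
+ // compiled-model plumbing in RuntimeTypeModel.Compile); illustrative usage, not part of the
+ // upstream source:
+ //
+ //   model.IncludeDateTimeKind = true;   // preserve DateTimeKind across serialization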
+ /// + protected internal virtual bool SerializeDateTimeKind() { return false; } + + /// + /// Resolve a System.Type to the compiler-specific type + /// + protected internal Type MapType(Type type) + { + return MapType(type, true); + } + /// + /// Resolve a System.Type to the compiler-specific type + /// + protected internal virtual Type MapType(Type type, bool demand) + { + return type; + } + + private WireType GetWireType(ProtoTypeCode code, DataFormat format, ref Type type, out int modelKey) + { + modelKey = -1; + if (Helpers.IsEnum(type)) + { + modelKey = GetKey(ref type); + return WireType.Variant; + } + switch (code) + { + case ProtoTypeCode.Int64: + case ProtoTypeCode.UInt64: + return format == DataFormat.FixedSize ? WireType.Fixed64 : WireType.Variant; + case ProtoTypeCode.Int16: + case ProtoTypeCode.Int32: + case ProtoTypeCode.UInt16: + case ProtoTypeCode.UInt32: + case ProtoTypeCode.Boolean: + case ProtoTypeCode.SByte: + case ProtoTypeCode.Byte: + case ProtoTypeCode.Char: + return format == DataFormat.FixedSize ? WireType.Fixed32 : WireType.Variant; + case ProtoTypeCode.Double: + return WireType.Fixed64; + case ProtoTypeCode.Single: + return WireType.Fixed32; + case ProtoTypeCode.String: + case ProtoTypeCode.DateTime: + case ProtoTypeCode.Decimal: + case ProtoTypeCode.ByteArray: + case ProtoTypeCode.TimeSpan: + case ProtoTypeCode.Guid: + case ProtoTypeCode.Uri: + return WireType.String; + } + + if ((modelKey = GetKey(ref type)) >= 0) + { + return WireType.String; + } + return WireType.None; + } + + + /// + /// This is the more "complete" version of Serialize, which handles single instances of mapped types. + /// The value is written as a complete field, including field-header and (for sub-objects) a + /// length-prefix + /// In addition to that, this provides support for: + /// - basic values; individual int / string / Guid / etc + /// - IEnumerable sequences of any type handled by TrySerializeAuxiliaryType + /// + /// + internal bool TrySerializeAuxiliaryType(ProtoWriter writer, Type type, DataFormat format, int tag, object value, bool isInsideList, object parentList) + { + if (type == null) { type = value.GetType(); } + + ProtoTypeCode typecode = Helpers.GetTypeCode(type); + // note the "ref type" here normalizes against proxies + WireType wireType = GetWireType(typecode, format, ref type, out int modelKey); + + + if (modelKey >= 0) + { // write the header, but defer to the model + if (Helpers.IsEnum(type)) + { // no header + Serialize(modelKey, value, writer); + return true; + } + else + { + ProtoWriter.WriteFieldHeader(tag, wireType, writer); + switch (wireType) + { + case WireType.None: + throw ProtoWriter.CreateException(writer); + case WireType.StartGroup: + case WireType.String: + // needs a wrapping length etc + SubItemToken token = ProtoWriter.StartSubItem(value, writer); + Serialize(modelKey, value, writer); + ProtoWriter.EndSubItem(token, writer); + return true; + default: + Serialize(modelKey, value, writer); + return true; + } + } + } + + if (wireType != WireType.None) + { + ProtoWriter.WriteFieldHeader(tag, wireType, writer); + } + switch (typecode) + { + case ProtoTypeCode.Int16: ProtoWriter.WriteInt16((short)value, writer); return true; + case ProtoTypeCode.Int32: ProtoWriter.WriteInt32((int)value, writer); return true; + case ProtoTypeCode.Int64: ProtoWriter.WriteInt64((long)value, writer); return true; + case ProtoTypeCode.UInt16: ProtoWriter.WriteUInt16((ushort)value, writer); return true; + case ProtoTypeCode.UInt32: ProtoWriter.WriteUInt32((uint)value, 
writer); return true; + case ProtoTypeCode.UInt64: ProtoWriter.WriteUInt64((ulong)value, writer); return true; + case ProtoTypeCode.Boolean: ProtoWriter.WriteBoolean((bool)value, writer); return true; + case ProtoTypeCode.SByte: ProtoWriter.WriteSByte((sbyte)value, writer); return true; + case ProtoTypeCode.Byte: ProtoWriter.WriteByte((byte)value, writer); return true; + case ProtoTypeCode.Char: ProtoWriter.WriteUInt16((ushort)(char)value, writer); return true; + case ProtoTypeCode.Double: ProtoWriter.WriteDouble((double)value, writer); return true; + case ProtoTypeCode.Single: ProtoWriter.WriteSingle((float)value, writer); return true; + case ProtoTypeCode.DateTime: + if (SerializeDateTimeKind()) + BclHelpers.WriteDateTimeWithKind((DateTime)value, writer); + else + BclHelpers.WriteDateTime((DateTime)value, writer); + return true; + case ProtoTypeCode.Decimal: BclHelpers.WriteDecimal((decimal)value, writer); return true; + case ProtoTypeCode.String: ProtoWriter.WriteString((string)value, writer); return true; + case ProtoTypeCode.ByteArray: ProtoWriter.WriteBytes((byte[])value, writer); return true; + case ProtoTypeCode.TimeSpan: BclHelpers.WriteTimeSpan((TimeSpan)value, writer); return true; + case ProtoTypeCode.Guid: BclHelpers.WriteGuid((Guid)value, writer); return true; + case ProtoTypeCode.Uri: ProtoWriter.WriteString(((Uri)value).OriginalString, writer); return true; + } + + // by now, we should have covered all the simple cases; if we wrote a field-header, we have + // forgotten something! + Helpers.DebugAssert(wireType == WireType.None); + + // now attempt to handle sequences (including arrays and lists) + if (value is IEnumerable sequence) + { + if (isInsideList) throw CreateNestedListsNotSupported(parentList?.GetType()); + foreach (object item in sequence) + { + if (item == null) { throw new NullReferenceException(); } + if (!TrySerializeAuxiliaryType(writer, null, format, tag, item, true, sequence)) + { + ThrowUnexpectedType(item.GetType()); + } + } + return true; + } + return false; + } + + private void SerializeCore(ProtoWriter writer, object value) + { + if (value == null) throw new ArgumentNullException(nameof(value)); + Type type = value.GetType(); + int key = GetKey(ref type); + if (key >= 0) + { + Serialize(key, value, writer); + } + else if (!TrySerializeAuxiliaryType(writer, type, DataFormat.Default, Serializer.ListItemTag, value, false, null)) + { + ThrowUnexpectedType(type); + } + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream. + /// + /// The existing instance to be serialized (cannot be null). + /// The destination stream to write to. + public void Serialize(Stream dest, object value) + { + Serialize(dest, value, null); + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream. + /// + /// The existing instance to be serialized (cannot be null). + /// The destination stream to write to. + /// Additional information about this serialization operation. + public void Serialize(Stream dest, object value, SerializationContext context) + { + using (ProtoWriter writer = ProtoWriter.Create(dest, this, context)) + { + writer.SetRootObject(value); + SerializeCore(writer, value); + writer.Close(); + } + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied writer. + /// + /// The existing instance to be serialized (cannot be null). + /// The destination writer to write to. 
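+ // Illustrative usage sketch (not part of the upstream source): most callers go through the
+ // Stream overloads above; a minimal round trip, assuming a [ProtoContract]-annotated
+ // ChatMessage type:
+ //
+ //   using (var ms = new System.IO.MemoryStream())
+ //   {
+ //       model.Serialize(ms, msg);
+ //       ms.Position = 0;
+ //       var copy = (ChatMessage)model.Deserialize(ms, null, typeof(ChatMessage));
+ //   }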
+ public void Serialize(ProtoWriter dest, object value) + { + if (dest == null) throw new ArgumentNullException(nameof(dest)); + dest.CheckDepthFlushlock(); + dest.SetRootObject(value); + SerializeCore(dest, value); + dest.CheckDepthFlushlock(); + ProtoWriter.Flush(dest); + } + + /// + /// Applies a protocol-buffer stream to an existing instance (or null), using length-prefixed + /// data - useful with network IO. + /// + /// The type being merged. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// How to encode the length prefix. + /// The tag used as a prefix to each record (only used with base-128 style prefixes). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public object DeserializeWithLengthPrefix(Stream source, object value, Type type, PrefixStyle style, int fieldNumber) + => DeserializeWithLengthPrefix(source, value, type, style, fieldNumber, null, out long bytesRead); + + /// + /// Applies a protocol-buffer stream to an existing instance (or null), using length-prefixed + /// data - useful with network IO. + /// + /// The type being merged. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// How to encode the length prefix. + /// The tag used as a prefix to each record (only used with base-128 style prefixes). + /// Used to resolve types on a per-field basis. + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public object DeserializeWithLengthPrefix(Stream source, object value, Type type, PrefixStyle style, int expectedField, Serializer.TypeResolver resolver) + => DeserializeWithLengthPrefix(source, value, type, style, expectedField, resolver, out long bytesRead); + + /// + /// Applies a protocol-buffer stream to an existing instance (or null), using length-prefixed + /// data - useful with network IO. + /// + /// The type being merged. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// How to encode the length prefix. + /// The tag used as a prefix to each record (only used with base-128 style prefixes). + /// Used to resolve types on a per-field basis. + /// Returns the number of bytes consumed by this operation (includes length-prefix overheads and any skipped data). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public object DeserializeWithLengthPrefix(Stream source, object value, Type type, PrefixStyle style, int expectedField, Serializer.TypeResolver resolver, out int bytesRead) + { + object result = DeserializeWithLengthPrefix(source, value, type, style, expectedField, resolver, out long bytesRead64, out bool haveObject, null); + bytesRead = checked((int)bytesRead64); + return result; + } + + /// + /// Applies a protocol-buffer stream to an existing instance (or null), using length-prefixed + /// data - useful with network IO. + /// + /// The type being merged. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). 
+ /// How to encode the length prefix. + /// The tag used as a prefix to each record (only used with base-128 style prefixes). + /// Used to resolve types on a per-field basis. + /// Returns the number of bytes consumed by this operation (includes length-prefix overheads and any skipped data). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public object DeserializeWithLengthPrefix(Stream source, object value, Type type, PrefixStyle style, int expectedField, Serializer.TypeResolver resolver, out long bytesRead) => DeserializeWithLengthPrefix(source, value, type, style, expectedField, resolver, out bytesRead, out bool haveObject, null); + + private object DeserializeWithLengthPrefix(Stream source, object value, Type type, PrefixStyle style, int expectedField, Serializer.TypeResolver resolver, out long bytesRead, out bool haveObject, SerializationContext context) + { + haveObject = false; + bool skip; + long len; + bytesRead = 0; + if (type == null && (style != PrefixStyle.Base128 || resolver == null)) + { + throw new InvalidOperationException("A type must be provided unless base-128 prefixing is being used in combination with a resolver"); + } + do + { + + bool expectPrefix = expectedField > 0 || resolver != null; + len = ProtoReader.ReadLongLengthPrefix(source, expectPrefix, style, out int actualField, out int tmpBytesRead); + if (tmpBytesRead == 0) return value; + bytesRead += tmpBytesRead; + if (len < 0) return value; + + switch (style) + { + case PrefixStyle.Base128: + if (expectPrefix && expectedField == 0 && type == null && resolver != null) + { + type = resolver(actualField); + skip = type == null; + } + else { skip = expectedField != actualField; } + break; + default: + skip = false; + break; + } + + if (skip) + { + if (len == long.MaxValue) throw new InvalidOperationException(); + ProtoReader.Seek(source, len, null); + bytesRead += len; + } + } while (skip); + + ProtoReader reader = null; + try + { + reader = ProtoReader.Create(source, this, context, len); + int key = GetKey(ref type); + if (key >= 0 && !Helpers.IsEnum(type)) + { + value = Deserialize(key, value, reader); + } + else + { + if (!(TryDeserializeAuxiliaryType(reader, DataFormat.Default, Serializer.ListItemTag, type, ref value, true, false, true, false, null) || len == 0)) + { + TypeModel.ThrowUnexpectedType(type); // throws + } + } + bytesRead += reader.LongPosition; + haveObject = true; + return value; + } + finally + { + ProtoReader.Recycle(reader); + } + } + + /// + /// Reads a sequence of consecutive length-prefixed items from a stream, using + /// either base-128 or fixed-length prefixes. Base-128 prefixes with a tag + /// are directly comparable to serializing multiple items in succession + /// (use the tag to emulate the implicit behavior + /// when serializing a list/array). When a tag is + /// specified, any records with different tags are silently omitted. The + /// tag is ignored. The tag is ignores for fixed-length prefixes. + /// + /// The binary stream containing the serialized records. + /// The prefix style used in the data. + /// The tag of records to return (if non-positive, then no tag is + /// expected and all records are returned). + /// On a field-by-field basis, the type of object to deserialize (can be null if "type" is specified). + /// The type of object to deserialize (can be null if "resolver" is specified). 
+ /// The sequence of deserialized objects. + public IEnumerable DeserializeItems(System.IO.Stream source, Type type, PrefixStyle style, int expectedField, Serializer.TypeResolver resolver) + { + return DeserializeItems(source, type, style, expectedField, resolver, null); + } + /// + /// Reads a sequence of consecutive length-prefixed items from a stream, using + /// either base-128 or fixed-length prefixes. Base-128 prefixes with a tag + /// are directly comparable to serializing multiple items in succession + /// (use the tag to emulate the implicit behavior + /// when serializing a list/array). When a tag is + /// specified, any records with different tags are silently omitted. The + /// tag is ignored. The tag is ignores for fixed-length prefixes. + /// + /// The binary stream containing the serialized records. + /// The prefix style used in the data. + /// The tag of records to return (if non-positive, then no tag is + /// expected and all records are returned). + /// On a field-by-field basis, the type of object to deserialize (can be null if "type" is specified). + /// The type of object to deserialize (can be null if "resolver" is specified). + /// The sequence of deserialized objects. + /// Additional information about this serialization operation. + public IEnumerable DeserializeItems(System.IO.Stream source, Type type, PrefixStyle style, int expectedField, Serializer.TypeResolver resolver, SerializationContext context) + { + return new DeserializeItemsIterator(this, source, type, style, expectedField, resolver, context); + } + + /// + /// Reads a sequence of consecutive length-prefixed items from a stream, using + /// either base-128 or fixed-length prefixes. Base-128 prefixes with a tag + /// are directly comparable to serializing multiple items in succession + /// (use the tag to emulate the implicit behavior + /// when serializing a list/array). When a tag is + /// specified, any records with different tags are silently omitted. The + /// tag is ignored. The tag is ignores for fixed-length prefixes. + /// + /// The type of object to deserialize. + /// The binary stream containing the serialized records. + /// The prefix style used in the data. + /// The tag of records to return (if non-positive, then no tag is + /// expected and all records are returned). + /// The sequence of deserialized objects. + public IEnumerable DeserializeItems(Stream source, PrefixStyle style, int expectedField) + { + return DeserializeItems(source, style, expectedField, null); + } + /// + /// Reads a sequence of consecutive length-prefixed items from a stream, using + /// either base-128 or fixed-length prefixes. Base-128 prefixes with a tag + /// are directly comparable to serializing multiple items in succession + /// (use the tag to emulate the implicit behavior + /// when serializing a list/array). When a tag is + /// specified, any records with different tags are silently omitted. The + /// tag is ignored. The tag is ignores for fixed-length prefixes. + /// + /// The type of object to deserialize. + /// The binary stream containing the serialized records. + /// The prefix style used in the data. + /// The tag of records to return (if non-positive, then no tag is + /// expected and all records are returned). + /// The sequence of deserialized objects. + /// Additional information about this serialization operation. 
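+ // Illustrative usage of DeserializeItems (a sketch only; assumes "model" is a populated
+ // RuntimeTypeModel, "stream" contains records written with SerializeWithLengthPrefix using
+ // field number 1, and "Person" is a registered contract type):
+ //
+ //   foreach (Person p in model.DeserializeItems<Person>(stream, PrefixStyle.Base128, 1))
+ //   {
+ //       // records are materialized lazily, one per length-prefixed frame
+ //   }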
+ public IEnumerable DeserializeItems(Stream source, PrefixStyle style, int expectedField, SerializationContext context) + { + return new DeserializeItemsIterator(this, source, style, expectedField, context); + } + + private sealed class DeserializeItemsIterator : DeserializeItemsIterator, + IEnumerator, + IEnumerable + { + IEnumerator IEnumerable.GetEnumerator() { return this; } + public new T Current { get { return (T)base.Current; } } + void IDisposable.Dispose() { } + public DeserializeItemsIterator(TypeModel model, Stream source, PrefixStyle style, int expectedField, SerializationContext context) + : base(model, source, model.MapType(typeof(T)), style, expectedField, null, context) { } + } + + private class DeserializeItemsIterator : IEnumerator, IEnumerable + { + IEnumerator IEnumerable.GetEnumerator() { return this; } + private bool haveObject; + private object current; + public bool MoveNext() + { + if (haveObject) + { + current = model.DeserializeWithLengthPrefix(source, null, type, style, expectedField, resolver, out long bytesRead, out haveObject, context); + } + return haveObject; + } + void IEnumerator.Reset() { throw new NotSupportedException(); } + public object Current { get { return current; } } + private readonly Stream source; + private readonly Type type; + private readonly PrefixStyle style; + private readonly int expectedField; + private readonly Serializer.TypeResolver resolver; + private readonly TypeModel model; + private readonly SerializationContext context; + public DeserializeItemsIterator(TypeModel model, Stream source, Type type, PrefixStyle style, int expectedField, Serializer.TypeResolver resolver, SerializationContext context) + { + haveObject = true; + this.source = source; + this.type = type; + this.style = style; + this.expectedField = expectedField; + this.resolver = resolver; + this.model = model; + this.context = context; + } + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream, + /// with a length-prefix. This is useful for socket programming, + /// as DeserializeWithLengthPrefix can be used to read the single object back + /// from an ongoing stream. + /// + /// The type being serialized. + /// The existing instance to be serialized (cannot be null). + /// How to encode the length prefix. + /// The destination stream to write to. + /// The tag used as a prefix to each record (only used with base-128 style prefixes). + public void SerializeWithLengthPrefix(Stream dest, object value, Type type, PrefixStyle style, int fieldNumber) + { + SerializeWithLengthPrefix(dest, value, type, style, fieldNumber, null); + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream, + /// with a length-prefix. This is useful for socket programming, + /// as DeserializeWithLengthPrefix can be used to read the single object back + /// from an ongoing stream. + /// + /// The type being serialized. + /// The existing instance to be serialized (cannot be null). + /// How to encode the length prefix. + /// The destination stream to write to. + /// The tag used as a prefix to each record (only used with base-128 style prefixes). + /// Additional information about this serialization operation. 
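+ // Illustrative round-trip for the length-prefixed API (a sketch only; assumes a populated
+ // RuntimeTypeModel "model", a registered contract type "Person" and an instance "person"):
+ //
+ //   using (var ms = new MemoryStream())
+ //   {
+ //       model.SerializeWithLengthPrefix(ms, person, typeof(Person), PrefixStyle.Base128, 1);
+ //       ms.Position = 0;
+ //       var copy = (Person)model.DeserializeWithLengthPrefix(ms, null, typeof(Person), PrefixStyle.Base128, 1);
+ //   }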
+ public void SerializeWithLengthPrefix(Stream dest, object value, Type type, PrefixStyle style, int fieldNumber, SerializationContext context) + { + if (type == null) + { + if (value == null) throw new ArgumentNullException(nameof(value)); + type = MapType(value.GetType()); + } + int key = GetKey(ref type); + using (ProtoWriter writer = ProtoWriter.Create(dest, this, context)) + { + switch (style) + { + case PrefixStyle.None: + Serialize(key, value, writer); + break; + case PrefixStyle.Base128: + case PrefixStyle.Fixed32: + case PrefixStyle.Fixed32BigEndian: + ProtoWriter.WriteObject(value, key, writer, style, fieldNumber); + break; + default: + throw new ArgumentOutOfRangeException("style"); + } + writer.Close(); + } + } + /// + /// Applies a protocol-buffer stream to an existing instance (which may be null). + /// + /// The type (including inheritance) to consider. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public object Deserialize(Stream source, object value, Type type) + { + return Deserialize(source, value, type, null); + } + + /// + /// Applies a protocol-buffer stream to an existing instance (which may be null). + /// + /// The type (including inheritance) to consider. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + /// Additional information about this serialization operation. + public object Deserialize(Stream source, object value, Type type, SerializationContext context) + { + bool autoCreate = PrepareDeserialize(value, ref type); + ProtoReader reader = null; + try + { + reader = ProtoReader.Create(source, this, context, ProtoReader.TO_EOF); + if (value != null) reader.SetRootObject(value); + object obj = DeserializeCore(reader, type, value, autoCreate); + reader.CheckFullyConsumed(); + return obj; + } + finally + { + ProtoReader.Recycle(reader); + } + } + + private bool PrepareDeserialize(object value, ref Type type) + { + if (type == null) + { + if (value == null) + { + throw new ArgumentNullException(nameof(type)); + } + else + { + type = MapType(value.GetType()); + } + } + + bool autoCreate = true; + Type underlyingType = Helpers.GetUnderlyingType(type); + if (underlyingType != null) + { + type = underlyingType; + autoCreate = false; + } + return autoCreate; + } + + /// + /// Applies a protocol-buffer stream to an existing instance (which may be null). + /// + /// The type (including inheritance) to consider. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// The number of bytes to consume. + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public object Deserialize(Stream source, object value, System.Type type, int length) + => Deserialize(source, value, type, length, null); + + /// + /// Applies a protocol-buffer stream to an existing instance (which may be null). 
+ /// + /// The type (including inheritance) to consider. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// The number of bytes to consume. + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public object Deserialize(Stream source, object value, System.Type type, long length) + => Deserialize(source, value, type, length, null); + + /// + /// Applies a protocol-buffer stream to an existing instance (which may be null). + /// + /// The type (including inheritance) to consider. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// The number of bytes to consume (or -1 to read to the end of the stream). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + /// Additional information about this serialization operation. + public object Deserialize(Stream source, object value, System.Type type, int length, SerializationContext context) + => Deserialize(source, value, type, length == int.MaxValue ? long.MaxValue : (long)length, context); + + /// + /// Applies a protocol-buffer stream to an existing instance (which may be null). + /// + /// The type (including inheritance) to consider. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// The number of bytes to consume (or -1 to read to the end of the stream). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + /// Additional information about this serialization operation. + public object Deserialize(Stream source, object value, System.Type type, long length, SerializationContext context) + { + bool autoCreate = PrepareDeserialize(value, ref type); + ProtoReader reader = null; + try + { + reader = ProtoReader.Create(source, this, context, length); + if (value != null) reader.SetRootObject(value); + object obj = DeserializeCore(reader, type, value, autoCreate); + reader.CheckFullyConsumed(); + return obj; + } + finally + { + ProtoReader.Recycle(reader); + } + } + + /// + /// Applies a protocol-buffer reader to an existing instance (which may be null). + /// + /// The type (including inheritance) to consider. + /// The existing instance to be modified (can be null). + /// The reader to apply to the instance (cannot be null). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. 
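+ // Illustrative use of the length-bounded Deserialize(Stream, ..., length) overloads (a sketch
+ // only; assumes "model" and a registered contract type "Person", and that exactly
+ // "payloadLength" bytes of the hypothetical "networkStream" belong to the next message because
+ // framing is handled by the caller):
+ //
+ //   var person = (Person)model.Deserialize(networkStream, null, typeof(Person), payloadLength);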
+ public object Deserialize(ProtoReader source, object value, System.Type type) + { + if (source == null) throw new ArgumentNullException("source"); + bool autoCreate = PrepareDeserialize(value, ref type); + if (value != null) source.SetRootObject(value); + object obj = DeserializeCore(source, type, value, autoCreate); + source.CheckFullyConsumed(); + return obj; + } + + private object DeserializeCore(ProtoReader reader, Type type, object value, bool noAutoCreate) + { + int key = GetKey(ref type); + if (key >= 0 && !Helpers.IsEnum(type)) + { + return Deserialize(key, value, reader); + } + // this returns true to say we actively found something, but a value is assigned either way (or throws) + TryDeserializeAuxiliaryType(reader, DataFormat.Default, Serializer.ListItemTag, type, ref value, true, false, noAutoCreate, false, null); + return value; + } + +#if COREFX + private static readonly System.Reflection.TypeInfo ilist = typeof(IList).GetTypeInfo(); +#else + private static readonly System.Type ilist = typeof(IList); +#endif + internal static MethodInfo ResolveListAdd(TypeModel model, Type listType, Type itemType, out bool isList) + { +#if COREFX || PROFILE259 + TypeInfo listTypeInfo = listType.GetTypeInfo(); +#else + Type listTypeInfo = listType; +#endif +#if PROFILE259 + isList = model.MapType(ilist).GetTypeInfo().IsAssignableFrom(listTypeInfo); +#else + isList = model.MapType(ilist).IsAssignableFrom(listTypeInfo); +#endif + Type[] types = { itemType }; + MethodInfo add = Helpers.GetInstanceMethod(listTypeInfo, "Add", types); + +#if !NO_GENERICS + if (add == null) + { // fallback: look for ICollection's Add(typedObject) method + + bool forceList = listTypeInfo.IsInterface && + model.MapType(typeof(System.Collections.Generic.IEnumerable<>)).MakeGenericType(types) +#if COREFX || PROFILE259 + .GetTypeInfo() +#endif + .IsAssignableFrom(listTypeInfo); + +#if COREFX || PROFILE259 + TypeInfo constuctedListType = typeof(System.Collections.Generic.ICollection<>).MakeGenericType(types).GetTypeInfo(); +#else + Type constuctedListType = model.MapType(typeof(System.Collections.Generic.ICollection<>)).MakeGenericType(types); +#endif + if (forceList || constuctedListType.IsAssignableFrom(listTypeInfo)) + { + add = Helpers.GetInstanceMethod(constuctedListType, "Add", types); + } + } + + if (add == null) + { + +#if COREFX || PROFILE259 + foreach (Type tmpType in listTypeInfo.ImplementedInterfaces) +#else + foreach (Type interfaceType in listTypeInfo.GetInterfaces()) +#endif + { +#if COREFX || PROFILE259 + TypeInfo interfaceType = tmpType.GetTypeInfo(); +#endif + if (interfaceType.Name == "IProducerConsumerCollection`1" && interfaceType.IsGenericType && interfaceType.GetGenericTypeDefinition().FullName == "System.Collections.Concurrent.IProducerConsumerCollection`1") + { + add = Helpers.GetInstanceMethod(interfaceType, "TryAdd", types); + if (add != null) break; + } + } + } +#endif + + if (add == null) + { // fallback: look for a public list.Add(object) method + types[0] = model.MapType(typeof(object)); + add = Helpers.GetInstanceMethod(listTypeInfo, "Add", types); + } + if (add == null && isList) + { // fallback: look for IList's Add(object) method + add = Helpers.GetInstanceMethod(model.MapType(ilist), "Add", types); + } + return add; + } + internal static Type GetListItemType(TypeModel model, Type listType) + { + Helpers.DebugAssert(listType != null); + +#if PROFILE259 + TypeInfo listTypeInfo = listType.GetTypeInfo(); + if (listType == typeof(string) || listType.IsArray + || 
!typeof(IEnumerable).GetTypeInfo().IsAssignableFrom(listTypeInfo)) return null; +#else + if (listType == model.MapType(typeof(string)) || listType.IsArray + || !model.MapType(typeof(IEnumerable)).IsAssignableFrom(listType)) return null; +#endif + + BasicList candidates = new BasicList(); +#if PROFILE259 + foreach (MethodInfo method in listType.GetRuntimeMethods()) +#else + foreach (MethodInfo method in listType.GetMethods()) +#endif + { + if (method.IsStatic || method.Name != "Add") continue; + ParameterInfo[] parameters = method.GetParameters(); + Type paramType; + if (parameters.Length == 1 && !candidates.Contains(paramType = parameters[0].ParameterType)) + { + candidates.Add(paramType); + } + } + + string name = listType.Name; + bool isQueueStack = name != null && (name.IndexOf("Queue") >= 0 || name.IndexOf("Stack") >= 0); + + if (!isQueueStack) + { + TestEnumerableListPatterns(model, candidates, listType); +#if PROFILE259 + foreach (Type iType in listTypeInfo.ImplementedInterfaces) + { + TestEnumerableListPatterns(model, candidates, iType); + } +#else + foreach (Type iType in listType.GetInterfaces()) + { + TestEnumerableListPatterns(model, candidates, iType); + } +#endif + } + +#if PROFILE259 + // more convenient GetProperty overload not supported on all platforms + foreach (PropertyInfo indexer in listType.GetRuntimeProperties()) + { + if (indexer.Name != "Item" || candidates.Contains(indexer.PropertyType)) continue; + ParameterInfo[] args = indexer.GetIndexParameters(); + if (args.Length != 1 || args[0].ParameterType != typeof(int)) continue; + MethodInfo getter = indexer.GetMethod; + if (getter == null || getter.IsStatic) continue; + candidates.Add(indexer.PropertyType); + } +#else + // more convenient GetProperty overload not supported on all platforms + foreach (PropertyInfo indexer in listType.GetProperties(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)) + { + if (indexer.Name != "Item" || candidates.Contains(indexer.PropertyType)) continue; + ParameterInfo[] args = indexer.GetIndexParameters(); + if (args.Length != 1 || args[0].ParameterType != model.MapType(typeof(int))) continue; + candidates.Add(indexer.PropertyType); + } +#endif + + switch (candidates.Count) + { + case 0: + return null; + case 1: + if ((Type)candidates[0] == listType) return null; // recursive + return (Type)candidates[0]; + case 2: + if ((Type)candidates[0] != listType && CheckDictionaryAccessors(model, (Type)candidates[0], (Type)candidates[1])) return (Type)candidates[0]; + if ((Type)candidates[1] != listType && CheckDictionaryAccessors(model, (Type)candidates[1], (Type)candidates[0])) return (Type)candidates[1]; + break; + } + + return null; + } + + private static void TestEnumerableListPatterns(TypeModel model, BasicList candidates, Type iType) + { + +#if COREFX || PROFILE259 + TypeInfo iTypeInfo = iType.GetTypeInfo(); + if (iTypeInfo.IsGenericType) + { + Type typeDef = iTypeInfo.GetGenericTypeDefinition(); + if( + typeDef == model.MapType(typeof(System.Collections.Generic.IEnumerable<>)) + || typeDef == model.MapType(typeof(System.Collections.Generic.ICollection<>)) + || typeDef.GetTypeInfo().FullName == "System.Collections.Concurrent.IProducerConsumerCollection`1") + { + + Type[] iTypeArgs = iTypeInfo.GenericTypeArguments; + if (!candidates.Contains(iTypeArgs[0])) + { + candidates.Add(iTypeArgs[0]); + } + } + } +#else + if (iType.IsGenericType) + { + Type typeDef = iType.GetGenericTypeDefinition(); + if (typeDef == model.MapType(typeof(System.Collections.Generic.IEnumerable<>)) 
+ || typeDef == model.MapType(typeof(System.Collections.Generic.ICollection<>)) + || typeDef.FullName == "System.Collections.Concurrent.IProducerConsumerCollection`1") + { + Type[] iTypeArgs = iType.GetGenericArguments(); + if (!candidates.Contains(iTypeArgs[0])) + { + candidates.Add(iTypeArgs[0]); + } + } + } +#endif + } + + private static bool CheckDictionaryAccessors(TypeModel model, Type pair, Type value) + { +#if COREFX || PROFILE259 + TypeInfo finalType = pair.GetTypeInfo(); + return finalType.IsGenericType && finalType.GetGenericTypeDefinition() == typeof(System.Collections.Generic.KeyValuePair<,>) + && finalType.GenericTypeArguments[1] == value; +#else + return pair.IsGenericType && pair.GetGenericTypeDefinition() == model.MapType(typeof(System.Collections.Generic.KeyValuePair<,>)) + && pair.GetGenericArguments()[1] == value; +#endif + } + + private bool TryDeserializeList(TypeModel model, ProtoReader reader, DataFormat format, int tag, Type listType, Type itemType, ref object value) + { + MethodInfo addMethod = TypeModel.ResolveListAdd(model, listType, itemType, out bool isList); + if (addMethod == null) throw new NotSupportedException("Unknown list variant: " + listType.FullName); + bool found = false; + object nextItem = null; + IList list = value as IList; + object[] args = isList ? null : new object[1]; + BasicList arraySurrogate = listType.IsArray ? new BasicList() : null; + + while (TryDeserializeAuxiliaryType(reader, format, tag, itemType, ref nextItem, true, true, true, true, value ?? listType)) + { + found = true; + if (value == null && arraySurrogate == null) + { + value = CreateListInstance(listType, itemType); + list = value as IList; + } + if (list != null) + { + list.Add(nextItem); + } + else if (arraySurrogate != null) + { + arraySurrogate.Add(nextItem); + } + else + { + args[0] = nextItem; + addMethod.Invoke(value, args); + } + nextItem = null; + } + if (arraySurrogate != null) + { + Array newArray; + if (value != null) + { + if (arraySurrogate.Count == 0) + { // we'll stay with what we had, thanks + } + else + { + Array existing = (Array)value; + newArray = Array.CreateInstance(itemType, existing.Length + arraySurrogate.Count); + Array.Copy(existing, newArray, existing.Length); + arraySurrogate.CopyTo(newArray, existing.Length); + value = newArray; + } + } + else + { + newArray = Array.CreateInstance(itemType, arraySurrogate.Count); + arraySurrogate.CopyTo(newArray, 0); + value = newArray; + } + } + return found; + } + + private static object CreateListInstance(Type listType, Type itemType) + { + Type concreteListType = listType; + + if (listType.IsArray) + { + return Array.CreateInstance(itemType, 0); + } + +#if COREFX || PROFILE259 + TypeInfo listTypeInfo = listType.GetTypeInfo(); + if (!listTypeInfo.IsClass || listTypeInfo.IsAbstract || + Helpers.GetConstructor(listTypeInfo, Helpers.EmptyTypes, true) == null) +#else + if (!listType.IsClass || listType.IsAbstract || + Helpers.GetConstructor(listType, Helpers.EmptyTypes, true) == null) +#endif + { + string fullName; + bool handled = false; +#if COREFX || PROFILE259 + if (listTypeInfo.IsInterface && +#else + if (listType.IsInterface && +#endif + (fullName = listType.FullName) != null && fullName.IndexOf("Dictionary") >= 0) // have to try to be frugal here... 
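+ // (the cheap FullName check above is deliberate: it skips the generic-interface inspection
+ // below for the common non-dictionary case)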
+ { +#if COREFX || PROFILE259 + TypeInfo finalType = listType.GetTypeInfo(); + if (finalType.IsGenericType && finalType.GetGenericTypeDefinition() == typeof(System.Collections.Generic.IDictionary<,>)) + { + Type[] genericTypes = listType.GenericTypeArguments; + concreteListType = typeof(System.Collections.Generic.Dictionary<,>).MakeGenericType(genericTypes); + handled = true; + } +#else + if (listType.IsGenericType && listType.GetGenericTypeDefinition() == typeof(System.Collections.Generic.IDictionary<,>)) + { + Type[] genericTypes = listType.GetGenericArguments(); + concreteListType = typeof(System.Collections.Generic.Dictionary<,>).MakeGenericType(genericTypes); + handled = true; + } +#endif + +#if !PORTABLE && !COREFX && !PROFILE259 + if (!handled && listType == typeof(IDictionary)) + { + concreteListType = typeof(Hashtable); + handled = true; + } +#endif + } + + if (!handled) + { + concreteListType = typeof(System.Collections.Generic.List<>).MakeGenericType(itemType); + handled = true; + } + +#if !PORTABLE && !COREFX && !PROFILE259 + if (!handled) + { + concreteListType = typeof(ArrayList); + handled = true; + } +#endif + } + return Activator.CreateInstance(concreteListType); + } + + /// + /// This is the more "complete" version of Deserialize, which handles single instances of mapped types. + /// The value is read as a complete field, including field-header and (for sub-objects) a + /// length-prefix..kmc + /// + /// In addition to that, this provides support for: + /// - basic values; individual int / string / Guid / etc + /// - IList sets of any type handled by TryDeserializeAuxiliaryType + /// + internal bool TryDeserializeAuxiliaryType(ProtoReader reader, DataFormat format, int tag, Type type, ref object value, bool skipOtherFields, bool asListItem, bool autoCreate, bool insideList, object parentListOrType) + { + if (type == null) throw new ArgumentNullException(nameof(type)); + Type itemType = null; + ProtoTypeCode typecode = Helpers.GetTypeCode(type); + WireType wiretype = GetWireType(typecode, format, ref type, out int modelKey); + + bool found = false; + if (wiretype == WireType.None) + { + itemType = GetListItemType(this, type); + if (itemType == null && type.IsArray && type.GetArrayRank() == 1 && type != typeof(byte[])) + { + itemType = type.GetElementType(); + } + if (itemType != null) + { + if (insideList) throw TypeModel.CreateNestedListsNotSupported((parentListOrType as Type) ?? (parentListOrType?.GetType())); + found = TryDeserializeList(this, reader, format, tag, type, itemType, ref value); + if (!found && autoCreate) + { + value = CreateListInstance(type, itemType); + } + return found; + } + + // otherwise, not a happy bunny... 
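+ // at this point the type is neither a basic inbuilt type, a known contract, nor a
+ // recognisable list/array, so it cannot be handled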
+ ThrowUnexpectedType(type); + } + + // to treat correctly, should read all values + + while (true) + { + // for convenience (re complex exit conditions), additional exit test here: + // if we've got the value, are only looking for one, and we aren't a list - then exit + if (found && asListItem) break; + + + // read the next item + int fieldNumber = reader.ReadFieldHeader(); + if (fieldNumber <= 0) break; + if (fieldNumber != tag) + { + if (skipOtherFields) + { + reader.SkipField(); + continue; + } + throw ProtoReader.AddErrorData(new InvalidOperationException( + "Expected field " + tag.ToString() + ", but found " + fieldNumber.ToString()), reader); + } + found = true; + reader.Hint(wiretype); // handle signed data etc + + if (modelKey >= 0) + { + switch (wiretype) + { + case WireType.String: + case WireType.StartGroup: + SubItemToken token = ProtoReader.StartSubItem(reader); + value = Deserialize(modelKey, value, reader); + ProtoReader.EndSubItem(token, reader); + continue; + default: + value = Deserialize(modelKey, value, reader); + continue; + } + } + switch (typecode) + { + case ProtoTypeCode.Int16: value = reader.ReadInt16(); continue; + case ProtoTypeCode.Int32: value = reader.ReadInt32(); continue; + case ProtoTypeCode.Int64: value = reader.ReadInt64(); continue; + case ProtoTypeCode.UInt16: value = reader.ReadUInt16(); continue; + case ProtoTypeCode.UInt32: value = reader.ReadUInt32(); continue; + case ProtoTypeCode.UInt64: value = reader.ReadUInt64(); continue; + case ProtoTypeCode.Boolean: value = reader.ReadBoolean(); continue; + case ProtoTypeCode.SByte: value = reader.ReadSByte(); continue; + case ProtoTypeCode.Byte: value = reader.ReadByte(); continue; + case ProtoTypeCode.Char: value = (char)reader.ReadUInt16(); continue; + case ProtoTypeCode.Double: value = reader.ReadDouble(); continue; + case ProtoTypeCode.Single: value = reader.ReadSingle(); continue; + case ProtoTypeCode.DateTime: value = BclHelpers.ReadDateTime(reader); continue; + case ProtoTypeCode.Decimal: value = BclHelpers.ReadDecimal(reader); continue; + case ProtoTypeCode.String: value = reader.ReadString(); continue; + case ProtoTypeCode.ByteArray: value = ProtoReader.AppendBytes((byte[])value, reader); continue; + case ProtoTypeCode.TimeSpan: value = BclHelpers.ReadTimeSpan(reader); continue; + case ProtoTypeCode.Guid: value = BclHelpers.ReadGuid(reader); continue; + case ProtoTypeCode.Uri: value = new Uri(reader.ReadString(), UriKind.RelativeOrAbsolute); continue; + } + + } + if (!found && !asListItem && autoCreate) + { + if (type != typeof(string)) + { + value = Activator.CreateInstance(type); + } + } + return found; + } + +#if !NO_RUNTIME + /// + /// Creates a new runtime model, to which the caller + /// can add support for a range of types. A model + /// can be used "as is", or can be compiled for + /// optimal performance. 
+ /// + [Obsolete("Please use RuntimeTypeModel.Create", false)] + public static RuntimeTypeModel Create() + { + return RuntimeTypeModel.Create(); + } +#endif + + /// + /// Applies common proxy scenarios, resolving the actual type to consider + /// + protected internal static Type ResolveProxies(Type type) + { + if (type == null) return null; +#if !NO_GENERICS + if (type.IsGenericParameter) return null; + // Nullable + Type tmp = Helpers.GetUnderlyingType(type); + if (tmp != null) return tmp; +#endif + +#if !CF + // EF POCO + string fullName = type.FullName; + if (fullName != null && fullName.StartsWith("System.Data.Entity.DynamicProxies.")) + { +#if COREFX || PROFILE259 + return type.GetTypeInfo().BaseType; +#else + return type.BaseType; +#endif + } + + // NHibernate +#if PROFILE259 + IEnumerable interfaces = type.GetTypeInfo().ImplementedInterfaces; +#else + Type[] interfaces = type.GetInterfaces(); +#endif + foreach (Type t in interfaces) + { + switch (t.FullName) + { + case "NHibernate.Proxy.INHibernateProxy": + case "NHibernate.Proxy.DynamicProxy.IProxy": + case "NHibernate.Intercept.IFieldInterceptorAccessor": +#if COREFX || PROFILE259 + return type.GetTypeInfo().BaseType; +#else + return type.BaseType; +#endif + } + } +#endif + return null; + } + + /// + /// Indicates whether the supplied type is explicitly modelled by the model + /// + public bool IsDefined(Type type) => GetKey(ref type) >= 0; + + readonly Dictionary knownKeys = new Dictionary(); + + // essentially just a ValueTuple - I just don't want the extra dependency + private readonly struct KnownTypeKey + { + public KnownTypeKey(Type type, int key) + { + Type = type; + Key = key; + } + + public int Key { get; } + + public Type Type { get; } + } + + /// + /// Provides the key that represents a given type in the current model. + /// The type is also normalized for proxies at the same time. + /// + protected internal int GetKey(ref Type type) + { + if (type == null) return -1; + int key; + lock (knownKeys) + { + if (knownKeys.TryGetValue(type, out var tuple)) + { + // the type can be changed via ResolveProxies etc +#if DEBUG + var actualKey = GetKeyImpl(type); + if(actualKey != tuple.Key) + { + throw new InvalidOperationException( + $"Key cache failure; got {tuple.Key} instead of {actualKey} for '{type.Name}'"); + } +#endif + type = tuple.Type; + return tuple.Key; + } + } + key = GetKeyImpl(type); + Type originalType = type; + if (key < 0) + { + Type normalized = ResolveProxies(type); + if (normalized != null && normalized != type) + { + type = normalized; // hence ref + key = GetKeyImpl(type); + } + } + lock (knownKeys) + { + knownKeys[originalType] = new KnownTypeKey(type, key); + } + return key; + } + + /// + /// Advertise that a type's key can have changed + /// + internal void ResetKeyCache() + { + // clear *everything* (think: multi-level - can be many descendents) + lock(knownKeys) + { + knownKeys.Clear(); + } + } + + /// + /// Provides the key that represents a given type in the current model. + /// + protected abstract int GetKeyImpl(Type type); + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream. + /// + /// Represents the type (including inheritance) to consider. + /// The existing instance to be serialized (cannot be null). + /// The destination stream to write to. + protected internal abstract void Serialize(int key, object value, ProtoWriter dest); + + /// + /// Applies a protocol-buffer stream to an existing instance (which may be null). 
+ /// + /// Represents the type (including inheritance) to consider. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + protected internal abstract object Deserialize(int key, object value, ProtoReader source); + + //internal ProtoSerializer Create(IProtoSerializer head) + //{ + // return new RuntimeSerializer(head, this); + //} + //internal ProtoSerializer Compile + + /// + /// Indicates the type of callback to be used + /// + protected internal enum CallbackType + { + /// + /// Invoked before an object is serialized + /// + BeforeSerialize, + /// + /// Invoked after an object is serialized + /// + AfterSerialize, + /// + /// Invoked before an object is deserialized (or when a new instance is created) + /// + BeforeDeserialize, + /// + /// Invoked after an object is deserialized + /// + AfterDeserialize + } + + /// + /// Create a deep clone of the supplied instance; any sub-items are also cloned. + /// + public object DeepClone(object value) + { + if (value == null) return null; + Type type = value.GetType(); + int key = GetKey(ref type); + + if (key >= 0 && !Helpers.IsEnum(type)) + { + using (MemoryStream ms = new MemoryStream()) + { + using (ProtoWriter writer = ProtoWriter.Create(ms, this, null)) + { + writer.SetRootObject(value); + Serialize(key, value, writer); + writer.Close(); + } + ms.Position = 0; + ProtoReader reader = null; + try + { + reader = ProtoReader.Create(ms, this, null, ProtoReader.TO_EOF); + return Deserialize(key, null, reader); + } + finally + { + ProtoReader.Recycle(reader); + } + } + } + if (type == typeof(byte[])) + { + byte[] orig = (byte[])value, clone = new byte[orig.Length]; + Buffer.BlockCopy(orig, 0, clone, 0, orig.Length); + return clone; + } + else if (GetWireType(Helpers.GetTypeCode(type), DataFormat.Default, ref type, out int modelKey) != WireType.None && modelKey < 0) + { // immutable; just return the original value + return value; + } + using (MemoryStream ms = new MemoryStream()) + { + using (ProtoWriter writer = ProtoWriter.Create(ms, this, null)) + { + if (!TrySerializeAuxiliaryType(writer, type, DataFormat.Default, Serializer.ListItemTag, value, false, null)) ThrowUnexpectedType(type); + writer.Close(); + } + ms.Position = 0; + ProtoReader reader = null; + try + { + reader = ProtoReader.Create(ms, this, null, ProtoReader.TO_EOF); + value = null; // start from scratch! + TryDeserializeAuxiliaryType(reader, DataFormat.Default, Serializer.ListItemTag, type, ref value, true, false, true, false, null); + return value; + } + finally + { + ProtoReader.Recycle(reader); + } + } + } + + /// + /// Indicates that while an inheritance tree exists, the exact type encountered was not + /// specified in that hierarchy and cannot be processed. + /// + protected internal static void ThrowUnexpectedSubtype(Type expected, Type actual) + { + if (expected != TypeModel.ResolveProxies(actual)) + { + throw new InvalidOperationException("Unexpected sub-type: " + actual.FullName); + } + } + + /// + /// Indicates that the given type was not expected, and cannot be processed. + /// + protected internal static void ThrowUnexpectedType(Type type) + { + string fullName = type == null ? 
"(unknown)" : type.FullName; + + if (type != null) + { + Type baseType = type +#if COREFX || PROFILE259 + .GetTypeInfo() +#endif + .BaseType; + if (baseType != null && baseType +#if COREFX || PROFILE259 + .GetTypeInfo() +#endif + .IsGenericType && baseType.GetGenericTypeDefinition().Name == "GeneratedMessage`2") + { + throw new InvalidOperationException( + "Are you mixing protobuf-net and protobuf-csharp-port? See https://stackoverflow.com/q/11564914/23354; type: " + fullName); + } + } + + throw new InvalidOperationException("Type is not expected, and no contract can be inferred: " + fullName); + } + + internal static Exception CreateNestedListsNotSupported(Type type) + { + return new NotSupportedException("Nested or jagged lists and arrays are not supported: " + (type?.FullName ?? "(null)")); + } + + /// + /// Indicates that the given type cannot be constructed; it may still be possible to + /// deserialize into existing instances. + /// + public static void ThrowCannotCreateInstance(Type type) + { + throw new ProtoException("No parameterless constructor found for " + (type?.FullName ?? "(null)")); + } + + internal static string SerializeType(TypeModel model, System.Type type) + { + if (model != null) + { + TypeFormatEventHandler handler = model.DynamicTypeFormatting; + if (handler != null) + { + TypeFormatEventArgs args = new TypeFormatEventArgs(type); + handler(model, args); + if (!string.IsNullOrEmpty(args.FormattedName)) return args.FormattedName; + } + } + return type.AssemblyQualifiedName; + } + + internal static Type DeserializeType(TypeModel model, string value) + { + + if (model != null) + { + TypeFormatEventHandler handler = model.DynamicTypeFormatting; + if (handler != null) + { + TypeFormatEventArgs args = new TypeFormatEventArgs(value); + handler(model, args); + if (args.Type != null) return args.Type; + } + } + return Type.GetType(value); + } + + /// + /// Returns true if the type supplied is either a recognised contract type, + /// or a *list* of a recognised contract type. + /// + /// Note that primitives always return false, even though the engine + /// will, if forced, try to serialize such + /// True if this type is recognised as a serializable entity, else false + public bool CanSerializeContractType(Type type) => CanSerialize(type, false, true, true); + + /// + /// Returns true if the type supplied is a basic type with inbuilt handling, + /// a recognised contract type, or a *list* of a basic / contract type. + /// + public bool CanSerialize(Type type) => CanSerialize(type, true, true, true); + + /// + /// Returns true if the type supplied is a basic type with inbuilt handling, + /// or a *list* of a basic type with inbuilt handling + /// + public bool CanSerializeBasicType(Type type) => CanSerialize(type, true, false, true); + + private bool CanSerialize(Type type, bool allowBasic, bool allowContract, bool allowLists) + { + if (type == null) throw new ArgumentNullException(nameof(type)); + Type tmp = Helpers.GetUnderlyingType(type); + if (tmp != null) type = tmp; + + // is it a basic type? + ProtoTypeCode typeCode = Helpers.GetTypeCode(type); + switch (typeCode) + { + case ProtoTypeCode.Empty: + case ProtoTypeCode.Unknown: + break; + default: + return allowBasic; // well-known basic type + } + int modelKey = GetKey(ref type); + if (modelKey >= 0) return allowContract; // known contract type + + // is it a list? 
+ if (allowLists) + { + Type itemType = null; + if (type.IsArray) + { // note we don't need to exclude byte[], as that is handled by GetTypeCode already + if (type.GetArrayRank() == 1) itemType = type.GetElementType(); + } + else + { + itemType = GetListItemType(this, type); + } + if (itemType != null) return CanSerialize(itemType, allowBasic, allowContract, false); + } + return false; + } + + /// + /// Suggest a .proto definition for the given type + /// + /// The type to generate a .proto definition for, or null to generate a .proto that represents the entire model + /// The .proto definition as a string + public virtual string GetSchema(Type type) => GetSchema(type, ProtoSyntax.Proto2); + + /// + /// Suggest a .proto definition for the given type + /// + /// The type to generate a .proto definition for, or null to generate a .proto that represents the entire model + /// The .proto definition as a string + /// The .proto syntax to use for the operation + public virtual string GetSchema(Type type, ProtoSyntax syntax) + { + throw new NotSupportedException(); + } + + /// + /// Used to provide custom services for writing and parsing type names when using dynamic types. Both parsing and formatting + /// are provided on a single API as it is essential that both are mapped identically at all times. + /// + public event TypeFormatEventHandler DynamicTypeFormatting; + +#if PLAT_BINARYFORMATTER && !(COREFX || PROFILE259) + /// + /// Creates a new IFormatter that uses protocol-buffer [de]serialization. + /// + /// A new IFormatter to be used during [de]serialization. + /// The type of object to be [de]deserialized by the formatter. + public System.Runtime.Serialization.IFormatter CreateFormatter(Type type) + { + return new Formatter(this, type); + } + + internal sealed class Formatter : System.Runtime.Serialization.IFormatter + { + private readonly TypeModel model; + private readonly Type type; + internal Formatter(TypeModel model, Type type) + { + this.model = model ?? throw new ArgumentNullException(nameof(model)); + this.type = type ?? 
throw new ArgumentNullException(nameof(type)); + } + private System.Runtime.Serialization.SerializationBinder binder; + public System.Runtime.Serialization.SerializationBinder Binder + { + get { return binder; } + set { binder = value; } + } + + private System.Runtime.Serialization.StreamingContext context; + public System.Runtime.Serialization.StreamingContext Context + { + get { return context; } + set { context = value; } + } + + public object Deserialize(Stream source) + { + return model.Deserialize(source, null, type, (long)-1, Context); + } + + public void Serialize(Stream destination, object graph) + { + model.Serialize(destination, graph, Context); + } + + private System.Runtime.Serialization.ISurrogateSelector surrogateSelector; + public System.Runtime.Serialization.ISurrogateSelector SurrogateSelector + { + get { return surrogateSelector; } + set { surrogateSelector = value; } + } + } +#endif + +#if DEBUG // this is used by some unit tests only, to ensure no buffering when buffering is disabled + private bool forwardsOnly; + /// + /// If true, buffering of nested objects is disabled + /// + public bool ForwardsOnly + { + get { return forwardsOnly; } + set { forwardsOnly = value; } + } +#endif + + internal virtual Type GetType(string fullName, Assembly context) + { + return ResolveKnownType(fullName, this, context); + } + + [System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.NoInlining)] + internal static Type ResolveKnownType(string name, TypeModel model, Assembly assembly) + { + if (string.IsNullOrEmpty(name)) return null; + try + { + Type type = Type.GetType(name); + + if (type != null) return type; + } + catch { } + try + { + int i = name.IndexOf(','); + string fullName = (i > 0 ? name.Substring(0, i) : name).Trim(); +#if !(COREFX || PROFILE259) + if (assembly == null) assembly = Assembly.GetCallingAssembly(); +#endif + Type type = assembly?.GetType(fullName); + if (type != null) return type; + } + catch { } + return null; + } + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/Meta/TypeModel.cs.meta b/Runtime/Protobuf-net/Meta/TypeModel.cs.meta new file mode 100644 index 0000000..cc869c3 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/TypeModel.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: e5eb182ec8bc8c5469c7819c0e3f7fb4 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Meta/ValueMember.cs b/Runtime/Protobuf-net/Meta/ValueMember.cs new file mode 100644 index 0000000..9566312 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/ValueMember.cs @@ -0,0 +1,855 @@ +#if !NO_RUNTIME +using System; + +using ProtoBuf.Serializers; +using System.Globalization; +using System.Collections.Generic; + +#if PROFILE259 +using System.Reflection; +using System.Linq; +#else +using System.Reflection; +#endif + +namespace ProtoBuf.Meta +{ + /// + /// Represents a member (property/field) that is mapped to a protobuf field + /// + public class ValueMember + { + private int _fieldNumber; + /// + /// The number that identifies this member in a protobuf stream + /// + public int FieldNumber + { + get => _fieldNumber; + internal set + { + if (_fieldNumber != value) + { + MetaType.AssertValidFieldNumber(value); + ThrowIfFrozen(); + _fieldNumber = value; + } + } + } + + private readonly MemberInfo originalMember; + private MemberInfo backingMember; + /// + /// Gets the member 
(field/property) which this member relates to. + /// + public MemberInfo Member { get { return originalMember; } } + /// + /// Gets the backing member (field/property) which this member relates to + /// + public MemberInfo BackingMember + { + get { return backingMember; } + set + { + if (backingMember != value) + { + ThrowIfFrozen(); + backingMember = value; + } + } + } + + private readonly Type parentType, itemType, defaultType, memberType; + private object defaultValue; + + /// + /// Within a list / array / etc, the type of object for each item in the list (especially useful with ArrayList) + /// + public Type ItemType => itemType; + + /// + /// The underlying type of the member + /// + public Type MemberType => memberType; + + /// + /// For abstract types (IList etc), the type of concrete object to create (if required) + /// + public Type DefaultType => defaultType; + + /// + /// The type the defines the member + /// + public Type ParentType => parentType; + + /// + /// The default value of the item (members with this value will not be serialized) + /// + public object DefaultValue + { + get { return defaultValue; } + set + { + if (defaultValue != value) + { + ThrowIfFrozen(); + defaultValue = value; + } + } + } + + private readonly RuntimeTypeModel model; + /// + /// Creates a new ValueMember instance + /// + public ValueMember(RuntimeTypeModel model, Type parentType, int fieldNumber, MemberInfo member, Type memberType, Type itemType, Type defaultType, DataFormat dataFormat, object defaultValue) + : this(model, fieldNumber, memberType, itemType, defaultType, dataFormat) + { + if (parentType == null) throw new ArgumentNullException("parentType"); + if (fieldNumber < 1 && !Helpers.IsEnum(parentType)) throw new ArgumentOutOfRangeException("fieldNumber"); + + this.originalMember = member ?? throw new ArgumentNullException("member"); + this.parentType = parentType; + if (fieldNumber < 1 && !Helpers.IsEnum(parentType)) throw new ArgumentOutOfRangeException("fieldNumber"); + //#if WINRT + if (defaultValue != null && model.MapType(defaultValue.GetType()) != memberType) + //#else + // if (defaultValue != null && !memberType.IsInstanceOfType(defaultValue)) + //#endif + { + defaultValue = ParseDefaultValue(memberType, defaultValue); + } + this.defaultValue = defaultValue; + + MetaType type = model.FindWithoutAdd(memberType); + if (type != null) + { + AsReference = type.AsReferenceDefault; + } + else + { // we need to scan the hard way; can't risk recursion by fully walking it + AsReference = MetaType.GetAsReferenceDefault(model, memberType); + } + } + /// + /// Creates a new ValueMember instance + /// + internal ValueMember(RuntimeTypeModel model, int fieldNumber, Type memberType, Type itemType, Type defaultType, DataFormat dataFormat) + { + _fieldNumber = fieldNumber; + this.memberType = memberType ?? throw new ArgumentNullException(nameof(memberType)); + this.itemType = itemType; + this.defaultType = defaultType; + + this.model = model ?? 
throw new ArgumentNullException(nameof(model)); + this.dataFormat = dataFormat; + } + internal object GetRawEnumValue() + { +#if PORTABLE || CF || COREFX || PROFILE259 + object value = ((FieldInfo)originalMember).GetValue(null); + switch(Helpers.GetTypeCode(Enum.GetUnderlyingType(((FieldInfo)originalMember).FieldType))) + { + case ProtoTypeCode.SByte: return (sbyte)value; + case ProtoTypeCode.Byte: return (byte)value; + case ProtoTypeCode.Int16: return (short)value; + case ProtoTypeCode.UInt16: return (ushort)value; + case ProtoTypeCode.Int32: return (int)value; + case ProtoTypeCode.UInt32: return (uint)value; + case ProtoTypeCode.Int64: return (long)value; + case ProtoTypeCode.UInt64: return (ulong)value; + default: + throw new InvalidOperationException(); + } +#else + return ((FieldInfo)originalMember).GetRawConstantValue(); +#endif + } + private static object ParseDefaultValue(Type type, object value) + { + { + Type tmp = Helpers.GetUnderlyingType(type); + if (tmp != null) type = tmp; + } + if (value is string s) + { + if (Helpers.IsEnum(type)) return Helpers.ParseEnum(type, s); + + switch (Helpers.GetTypeCode(type)) + { + case ProtoTypeCode.Boolean: return bool.Parse(s); + case ProtoTypeCode.Byte: return byte.Parse(s, NumberStyles.Integer, CultureInfo.InvariantCulture); + case ProtoTypeCode.Char: // char.Parse missing on CF/phone7 + if (s.Length == 1) return s[0]; + throw new FormatException("Single character expected: \"" + s + "\""); + case ProtoTypeCode.DateTime: return DateTime.Parse(s, CultureInfo.InvariantCulture); + case ProtoTypeCode.Decimal: return decimal.Parse(s, NumberStyles.Any, CultureInfo.InvariantCulture); + case ProtoTypeCode.Double: return double.Parse(s, NumberStyles.Any, CultureInfo.InvariantCulture); + case ProtoTypeCode.Int16: return short.Parse(s, NumberStyles.Any, CultureInfo.InvariantCulture); + case ProtoTypeCode.Int32: return int.Parse(s, NumberStyles.Any, CultureInfo.InvariantCulture); + case ProtoTypeCode.Int64: return long.Parse(s, NumberStyles.Any, CultureInfo.InvariantCulture); + case ProtoTypeCode.SByte: return sbyte.Parse(s, NumberStyles.Integer, CultureInfo.InvariantCulture); + case ProtoTypeCode.Single: return float.Parse(s, NumberStyles.Any, CultureInfo.InvariantCulture); + case ProtoTypeCode.String: return s; + case ProtoTypeCode.UInt16: return ushort.Parse(s, NumberStyles.Any, CultureInfo.InvariantCulture); + case ProtoTypeCode.UInt32: return uint.Parse(s, NumberStyles.Any, CultureInfo.InvariantCulture); + case ProtoTypeCode.UInt64: return ulong.Parse(s, NumberStyles.Any, CultureInfo.InvariantCulture); + case ProtoTypeCode.TimeSpan: return TimeSpan.Parse(s); + case ProtoTypeCode.Uri: return s; // Uri is decorated as string + case ProtoTypeCode.Guid: return new Guid(s); + } + } + + if (Helpers.IsEnum(type)) return Enum.ToObject(type, value); + return Convert.ChangeType(value, type, CultureInfo.InvariantCulture); + + } + + private IProtoSerializer serializer; + internal IProtoSerializer Serializer + { + get + { + return serializer ?? 
(serializer = BuildSerializer()); + } + } + + private DataFormat dataFormat; + /// + /// Specifies the rules used to process the field; this is used to determine the most appropriate + /// wite-type, but also to describe subtypes within that wire-type (such as SignedVariant) + /// + public DataFormat DataFormat + { + get { return dataFormat; } + set + { + if (value != dataFormat) + { + ThrowIfFrozen(); + this.dataFormat = value; + } + } + } + + /// + /// Indicates whether this field should follow strict encoding rules; this means (for example) that if a "fixed32" + /// is encountered when "variant" is defined, then it will fail (throw an exception) when parsing. Note that + /// when serializing the defined type is always used. + /// + public bool IsStrict + { + get { return HasFlag(OPTIONS_IsStrict); } + set { SetFlag(OPTIONS_IsStrict, value, true); } + } + + /// + /// Indicates whether this field should use packed encoding (which can save lots of space for repeated primitive values). + /// This option only applies to list/array data of primitive types (int, double, etc). + /// + public bool IsPacked + { + get { return HasFlag(OPTIONS_IsPacked); } + set { SetFlag(OPTIONS_IsPacked, value, true); } + } + + /// + /// Indicates whether this field should *repace* existing values (the default is false, meaning *append*). + /// This option only applies to list/array data. + /// + public bool OverwriteList + { + get { return HasFlag(OPTIONS_OverwriteList); } + set { SetFlag(OPTIONS_OverwriteList, value, true); } + } + + /// + /// Indicates whether this field is mandatory. + /// + public bool IsRequired + { + get { return HasFlag(OPTIONS_IsRequired); } + set { SetFlag(OPTIONS_IsRequired, value, true); } + } + + /// + /// Enables full object-tracking/full-graph support. + /// + public bool AsReference + { + get { return HasFlag(OPTIONS_AsReference); } + set { SetFlag(OPTIONS_AsReference, value, true); } + } + + /// + /// Embeds the type information into the stream, allowing usage with types not known in advance. + /// + public bool DynamicType + { + get { return HasFlag(OPTIONS_DynamicType); } + set { SetFlag(OPTIONS_DynamicType, value, true); } + } + + /// + /// Indicates that the member should be treated as a protobuf Map + /// + public bool IsMap + { + get { return HasFlag(OPTIONS_IsMap); } + set { SetFlag(OPTIONS_IsMap, value, true); } + } + + private DataFormat mapKeyFormat, mapValueFormat; + /// + /// Specifies the data-format that should be used for the key, when IsMap is enabled + /// + public DataFormat MapKeyFormat + { + get { return mapKeyFormat; } + set + { + if (mapKeyFormat != value) + { + ThrowIfFrozen(); + mapKeyFormat = value; + } + } + } + /// + /// Specifies the data-format that should be used for the value, when IsMap is enabled + /// + public DataFormat MapValueFormat + { + get { return mapValueFormat; } + set + { + if (mapValueFormat != value) + { + ThrowIfFrozen(); + mapValueFormat = value; + } + } + } + + private MethodInfo getSpecified, setSpecified; + /// + /// Specifies methods for working with optional data members. + /// + /// Provides a method (null for none) to query whether this member should + /// be serialized; it must be of the form "bool {Method}()". The member is only serialized if the + /// method returns true. + /// Provides a method (null for none) to indicate that a member was + /// deserialized; it must be of the form "void {Method}(bool)", and will be called with "true" + /// when data is found. 
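A minimal sketch of the member shapes this method expects (the Person type, its field number, and the method names below are illustrative assumptions, not part of the patch):

    using ProtoBuf;
    using ProtoBuf.Meta;

    [ProtoContract]
    public class Person
    {
        [ProtoMember(1)]
        public int Age;

        // "bool {Method}()": Age is only serialized when this returns true.
        public bool ShouldSerializeAge() { return Age > 0; }

        // "void {Method}(bool)": called with true when Age data is found during deserialization.
        public void AgeSpecified(bool value) { /* record presence here if needed */ }
    }

    public static class SetSpecifiedSketch
    {
        public static void Configure()
        {
            var model = RuntimeTypeModel.Create();
            var ageMember = model.Add(typeof(Person), true)[1]; // ValueMember for field 1
            ageMember.SetSpecified(
                typeof(Person).GetMethod("ShouldSerializeAge"),
                typeof(Person).GetMethod("AgeSpecified"));
        }
    }
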
+ public void SetSpecified(MethodInfo getSpecified, MethodInfo setSpecified) + { + if (this.getSpecified != getSpecified || this.setSpecified != setSpecified) + { + if (getSpecified != null) + { + if (getSpecified.ReturnType != model.MapType(typeof(bool)) + || getSpecified.IsStatic + || getSpecified.GetParameters().Length != 0) + { + throw new ArgumentException("Invalid pattern for checking member-specified", "getSpecified"); + } + } + if (setSpecified != null) + { + ParameterInfo[] args; + if (setSpecified.ReturnType != model.MapType(typeof(void)) + || setSpecified.IsStatic + || (args = setSpecified.GetParameters()).Length != 1 + || args[0].ParameterType != model.MapType(typeof(bool))) + { + throw new ArgumentException("Invalid pattern for setting member-specified", "setSpecified"); + } + } + + ThrowIfFrozen(); + this.getSpecified = getSpecified; + this.setSpecified = setSpecified; + } + } + + private void ThrowIfFrozen() + { + if (serializer != null) throw new InvalidOperationException("The type cannot be changed once a serializer has been generated"); + } + + internal bool ResolveMapTypes(out Type dictionaryType, out Type keyType, out Type valueType) + { + dictionaryType = keyType = valueType = null; + try + { +#if COREFX || PROFILE259 + var info = memberType.GetTypeInfo(); +#else + var info = memberType; +#endif + if (ImmutableCollectionDecorator.IdentifyImmutable(model, MemberType, out _, out _, out _, out _, out _, out _)) + { + return false; + } + if (info.IsInterface && info.IsGenericType && info.GetGenericTypeDefinition() == typeof(IDictionary<,>)) + { +#if PROFILE259 + var typeArgs = memberType.GetGenericTypeDefinition().GenericTypeArguments; +#else + var typeArgs = memberType.GetGenericArguments(); +#endif + if (IsValidMapKeyType(typeArgs[0])) + { + keyType = typeArgs[0]; + valueType = typeArgs[1]; + dictionaryType = memberType; + } + return false; + } +#if PROFILE259 + foreach (var iType in memberType.GetTypeInfo().ImplementedInterfaces) +#else + foreach (var iType in memberType.GetInterfaces()) +#endif + { +#if COREFX || PROFILE259 + info = iType.GetTypeInfo(); +#else + info = iType; +#endif + if (info.IsGenericType && info.GetGenericTypeDefinition() == typeof(IDictionary<,>)) + { + if (dictionaryType != null) throw new InvalidOperationException("Multiple dictionary interfaces implemented by type: " + memberType.FullName); +#if PROFILE259 + var typeArgs = iType.GetGenericTypeDefinition().GenericTypeArguments; +#else + var typeArgs = iType.GetGenericArguments(); +#endif + if (IsValidMapKeyType(typeArgs[0])) + { + keyType = typeArgs[0]; + valueType = typeArgs[1]; + dictionaryType = memberType; + } + } + } + if (dictionaryType == null) return false; + + // (note we checked the key type already) + // not a map if value is repeated + Type itemType = null, defaultType = null; + model.ResolveListTypes(valueType, ref itemType, ref defaultType); + if (itemType != null) return false; + + return dictionaryType != null; + } + catch + { + // if it isn't a good fit; don't use "map" + return false; + } + } + + static bool IsValidMapKeyType(Type type) + { + if (type == null || Helpers.IsEnum(type)) return false; + switch (Helpers.GetTypeCode(type)) + { + case ProtoTypeCode.Boolean: + case ProtoTypeCode.Byte: + case ProtoTypeCode.Char: + case ProtoTypeCode.Int16: + case ProtoTypeCode.Int32: + case ProtoTypeCode.Int64: + case ProtoTypeCode.String: + + case ProtoTypeCode.SByte: + case ProtoTypeCode.UInt16: + case ProtoTypeCode.UInt32: + case ProtoTypeCode.UInt64: + return true; + } + return 
false; + } + private IProtoSerializer BuildSerializer() + { + int opaqueToken = 0; + try + { + model.TakeLock(ref opaqueToken);// check nobody is still adding this type + var member = backingMember ?? originalMember; + IProtoSerializer ser; + if (IsMap) + { + ResolveMapTypes(out var dictionaryType, out var keyType, out var valueType); + + if (dictionaryType == null) + { + throw new InvalidOperationException("Unable to resolve map type for type: " + memberType.FullName); + } + var concreteType = defaultType; + if (concreteType == null && Helpers.IsClass(memberType)) + { + concreteType = memberType; + } + var keySer = TryGetCoreSerializer(model, MapKeyFormat, keyType, out var keyWireType, false, false, false, false); + if (!AsReference) + { + AsReference = MetaType.GetAsReferenceDefault(model, valueType); + } + var valueSer = TryGetCoreSerializer(model, MapValueFormat, valueType, out var valueWireType, AsReference, DynamicType, false, true); +#if PROFILE259 + IEnumerable ctors = typeof(MapDecorator<,,>).MakeGenericType(new Type[] { dictionaryType, keyType, valueType }).GetTypeInfo().DeclaredConstructors; + if (ctors.Count() != 1) + { + throw new InvalidOperationException("Unable to resolve MapDecorator constructor"); + } + ser = (IProtoSerializer)ctors.First().Invoke(new object[] {model, concreteType, keySer, valueSer, _fieldNumber, + DataFormat == DataFormat.Group ? WireType.StartGroup : WireType.String, keyWireType, valueWireType, OverwriteList }); +#else + var ctors = typeof(MapDecorator<,,>).MakeGenericType(new Type[] { dictionaryType, keyType, valueType }).GetConstructors( + BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance); + if (ctors.Length != 1) throw new InvalidOperationException("Unable to resolve MapDecorator constructor"); + ser = (IProtoSerializer)ctors[0].Invoke(new object[] {model, concreteType, keySer, valueSer, _fieldNumber, + DataFormat == DataFormat.Group ? WireType.StartGroup : WireType.String, keyWireType, valueWireType, OverwriteList }); +#endif + } + else + { + Type finalType = itemType ?? memberType; + ser = TryGetCoreSerializer(model, dataFormat, finalType, out WireType wireType, AsReference, DynamicType, OverwriteList, true); + if (ser == null) + { + throw new InvalidOperationException("No serializer defined for type: " + finalType.FullName); + } + + // apply tags + if (itemType != null && SupportNull) + { + if (IsPacked) + { + throw new NotSupportedException("Packed encodings cannot support null values"); + } + ser = new TagDecorator(NullDecorator.Tag, wireType, IsStrict, ser); + ser = new NullDecorator(model, ser); + ser = new TagDecorator(_fieldNumber, WireType.StartGroup, false, ser); + } + else + { + ser = new TagDecorator(_fieldNumber, wireType, IsStrict, ser); + } + // apply lists if appropriate + if (itemType != null) + { + Type underlyingItemType = SupportNull ? itemType : Helpers.GetUnderlyingType(itemType) ?? 
itemType; + + Helpers.DebugAssert(underlyingItemType == ser.ExpectedType + || (ser.ExpectedType == model.MapType(typeof(object)) && !Helpers.IsValueType(underlyingItemType)) + , "Wrong type in the tail; expected {0}, received {1}", ser.ExpectedType, underlyingItemType); + if (memberType.IsArray) + { + ser = new ArrayDecorator(model, ser, _fieldNumber, IsPacked, wireType, memberType, OverwriteList, SupportNull); + } + else + { + ser = ListDecorator.Create(model, memberType, defaultType, ser, _fieldNumber, IsPacked, wireType, member != null && PropertyDecorator.CanWrite(model, member), OverwriteList, SupportNull); + } + } + else if (defaultValue != null && !IsRequired && getSpecified == null) + { // note: "ShouldSerialize*" / "*Specified" / etc ^^^^ take precedence over defaultValue, + // as does "IsRequired" + ser = new DefaultValueDecorator(model, defaultValue, ser); + } + if (memberType == model.MapType(typeof(Uri))) + { + ser = new UriDecorator(model, ser); + } +#if PORTABLE + else if(memberType.FullName == typeof(Uri).FullName) + { + // In PCLs, the Uri type may not match (WinRT uses Internal/Uri, .Net uses System/Uri) + ser = new ReflectedUriDecorator(memberType, model, ser); + } +#endif + } + if (member != null) + { + if (member is PropertyInfo prop) + { + ser = new PropertyDecorator(model, parentType, prop, ser); + } + else if (member is FieldInfo fld) + { + ser = new FieldDecorator(parentType, fld, ser); + } + else + { + throw new InvalidOperationException(); + } + + if (getSpecified != null || setSpecified != null) + { + ser = new MemberSpecifiedDecorator(getSpecified, setSpecified, ser); + } + } + return ser; + } + finally + { + model.ReleaseLock(opaqueToken); + } + } + + private static WireType GetIntWireType(DataFormat format, int width) + { + switch (format) + { + case DataFormat.ZigZag: return WireType.SignedVariant; + case DataFormat.FixedSize: return width == 32 ? 
WireType.Fixed32 : WireType.Fixed64; + case DataFormat.TwosComplement: + case DataFormat.Default: return WireType.Variant; + default: throw new InvalidOperationException(); + } + } + private static WireType GetDateTimeWireType(DataFormat format) + { + switch (format) + { + + case DataFormat.Group: return WireType.StartGroup; + case DataFormat.FixedSize: return WireType.Fixed64; + case DataFormat.WellKnown: + case DataFormat.Default: + return WireType.String; + default: throw new InvalidOperationException(); + } + } + + internal static IProtoSerializer TryGetCoreSerializer(RuntimeTypeModel model, DataFormat dataFormat, Type type, out WireType defaultWireType, + bool asReference, bool dynamicType, bool overwriteList, bool allowComplexTypes) + { + { + Type tmp = Helpers.GetUnderlyingType(type); + if (tmp != null) type = tmp; + } + if (Helpers.IsEnum(type)) + { + if (allowComplexTypes && model != null) + { + // need to do this before checking the typecode; an int enum will report Int32 etc + defaultWireType = WireType.Variant; + return new EnumSerializer(type, model.GetEnumMap(type)); + } + else + { // enum is fine for adding as a meta-type + defaultWireType = WireType.None; + return null; + } + } + ProtoTypeCode code = Helpers.GetTypeCode(type); + switch (code) + { + case ProtoTypeCode.Int32: + defaultWireType = GetIntWireType(dataFormat, 32); + return new Int32Serializer(model); + case ProtoTypeCode.UInt32: + defaultWireType = GetIntWireType(dataFormat, 32); + return new UInt32Serializer(model); + case ProtoTypeCode.Int64: + defaultWireType = GetIntWireType(dataFormat, 64); + return new Int64Serializer(model); + case ProtoTypeCode.UInt64: + defaultWireType = GetIntWireType(dataFormat, 64); + return new UInt64Serializer(model); + case ProtoTypeCode.String: + defaultWireType = WireType.String; + if (asReference) + { + return new NetObjectSerializer(model, model.MapType(typeof(string)), 0, BclHelpers.NetObjectOptions.AsReference); + } + return new StringSerializer(model); + case ProtoTypeCode.Single: + defaultWireType = WireType.Fixed32; + return new SingleSerializer(model); + case ProtoTypeCode.Double: + defaultWireType = WireType.Fixed64; + return new DoubleSerializer(model); + case ProtoTypeCode.Boolean: + defaultWireType = WireType.Variant; + return new BooleanSerializer(model); + case ProtoTypeCode.DateTime: + defaultWireType = GetDateTimeWireType(dataFormat); + return new DateTimeSerializer(dataFormat, model); + case ProtoTypeCode.Decimal: + defaultWireType = WireType.String; + return new DecimalSerializer(model); + case ProtoTypeCode.Byte: + defaultWireType = GetIntWireType(dataFormat, 32); + return new ByteSerializer(model); + case ProtoTypeCode.SByte: + defaultWireType = GetIntWireType(dataFormat, 32); + return new SByteSerializer(model); + case ProtoTypeCode.Char: + defaultWireType = WireType.Variant; + return new CharSerializer(model); + case ProtoTypeCode.Int16: + defaultWireType = GetIntWireType(dataFormat, 32); + return new Int16Serializer(model); + case ProtoTypeCode.UInt16: + defaultWireType = GetIntWireType(dataFormat, 32); + return new UInt16Serializer(model); + case ProtoTypeCode.TimeSpan: + defaultWireType = GetDateTimeWireType(dataFormat); + return new TimeSpanSerializer(dataFormat, model); + case ProtoTypeCode.Guid: + defaultWireType = dataFormat == DataFormat.Group ? 
WireType.StartGroup : WireType.String; + return new GuidSerializer(model); + case ProtoTypeCode.Uri: + defaultWireType = WireType.String; + return new StringSerializer(model); + case ProtoTypeCode.ByteArray: + defaultWireType = WireType.String; + return new BlobSerializer(model, overwriteList); + case ProtoTypeCode.Type: + defaultWireType = WireType.String; + return new SystemTypeSerializer(model); + } + IProtoSerializer parseable = model.AllowParseableTypes ? ParseableSerializer.TryCreate(type, model) : null; + if (parseable != null) + { + defaultWireType = WireType.String; + return parseable; + } + if (allowComplexTypes && model != null) + { + int key = model.GetKey(type, false, true); + MetaType meta = null; + if (key >= 0) + { + meta = model[type]; + if (dataFormat == DataFormat.Default && meta.IsGroup) + { + dataFormat = DataFormat.Group; + } + } + + if (asReference || dynamicType) + { + BclHelpers.NetObjectOptions options = BclHelpers.NetObjectOptions.None; + if (asReference) options |= BclHelpers.NetObjectOptions.AsReference; + if (dynamicType) options |= BclHelpers.NetObjectOptions.DynamicType; + if (meta != null) + { // exists + if (asReference && Helpers.IsValueType(type)) + { + string message = "AsReference cannot be used with value-types"; + + if (type.Name == "KeyValuePair`2") + { + message += "; please see https://stackoverflow.com/q/14436606/23354"; + } + else + { + message += ": " + type.FullName; + } + throw new InvalidOperationException(message); + } + + if (asReference && meta.IsAutoTuple) options |= BclHelpers.NetObjectOptions.LateSet; + if (meta.UseConstructor) options |= BclHelpers.NetObjectOptions.UseConstructor; + } + defaultWireType = dataFormat == DataFormat.Group ? WireType.StartGroup : WireType.String; + return new NetObjectSerializer(model, type, key, options); + } + if (key >= 0) + { + defaultWireType = dataFormat == DataFormat.Group ? WireType.StartGroup : WireType.String; + return new SubItemSerializer(type, key, meta, true); + } + } + defaultWireType = WireType.None; + return null; + } + + + private string name; + internal void SetName(string name) + { + if (name != this.name) + { + ThrowIfFrozen(); + this.name = name; + } + } + /// + /// Gets the logical name for this member in the schema (this is not critical for binary serialization, but may be used + /// when inferring a schema). + /// + public string Name + { + get { return string.IsNullOrEmpty(name) ? originalMember.Name : name; } + set { SetName(value); } + } + + private const byte + OPTIONS_IsStrict = 1, + OPTIONS_IsPacked = 2, + OPTIONS_IsRequired = 4, + OPTIONS_OverwriteList = 8, + OPTIONS_SupportNull = 16, + OPTIONS_AsReference = 32, + OPTIONS_IsMap = 64, + OPTIONS_DynamicType = 128; + + private byte flags; + private bool HasFlag(byte flag) { return (flags & flag) == flag; } + private void SetFlag(byte flag, bool value, bool throwIfFrozen) + { + if (throwIfFrozen && HasFlag(flag) != value) + { + ThrowIfFrozen(); + } + if (value) + flags |= flag; + else + flags = (byte)(flags & ~flag); + } + + /// + /// Should lists have extended support for null values? Note this makes the serialization less efficient. 
+ /// + public bool SupportNull + { + get { return HasFlag(OPTIONS_SupportNull); } + set { SetFlag(OPTIONS_SupportNull, value, true); } + } + + internal string GetSchemaTypeName(bool applyNetObjectProxy, ref RuntimeTypeModel.CommonImports imports) + { + Type effectiveType = ItemType; + if (effectiveType == null) effectiveType = MemberType; + return model.GetSchemaTypeName(effectiveType, DataFormat, applyNetObjectProxy && AsReference, applyNetObjectProxy && DynamicType, ref imports); + } + + + internal sealed class Comparer : System.Collections.IComparer, IComparer + { + public static readonly Comparer Default = new Comparer(); + + public int Compare(object x, object y) + { + return Compare(x as ValueMember, y as ValueMember); + } + + public int Compare(ValueMember x, ValueMember y) + { + if (ReferenceEquals(x, y)) return 0; + if (x == null) return -1; + if (y == null) return 1; + + return x.FieldNumber.CompareTo(y.FieldNumber); + } + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Meta/ValueMember.cs.meta b/Runtime/Protobuf-net/Meta/ValueMember.cs.meta new file mode 100644 index 0000000..d3eeb78 --- /dev/null +++ b/Runtime/Protobuf-net/Meta/ValueMember.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: dba7fd2d1d1c883469e153f7ac5fdd86 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/NetObjectCache.cs b/Runtime/Protobuf-net/NetObjectCache.cs new file mode 100644 index 0000000..8e83549 --- /dev/null +++ b/Runtime/Protobuf-net/NetObjectCache.cs @@ -0,0 +1,190 @@ +using System; +using System.Collections.Generic; +using ProtoBuf.Meta; + +namespace ProtoBuf +{ + internal sealed class NetObjectCache + { + internal const int Root = 0; + private MutableList underlyingList; + + private MutableList List => underlyingList ?? 
(underlyingList = new MutableList()); + + internal object GetKeyedObject(int key) + { + if (key-- == Root) + { + if (rootObject == null) throw new ProtoException("No root object assigned"); + return rootObject; + } + BasicList list = List; + + if (key < 0 || key >= list.Count) + { + Helpers.DebugWriteLine("Missing key: " + key); + throw new ProtoException("Internal error; a missing key occurred"); + } + + object tmp = list[key]; + if (tmp == null) + { + throw new ProtoException("A deferred key does not have a value yet"); + } + return tmp; + } + + internal void SetKeyedObject(int key, object value) + { + if (key-- == Root) + { + if (value == null) throw new ArgumentNullException(nameof(value)); + if (rootObject != null && ((object)rootObject != (object)value)) throw new ProtoException("The root object cannot be reassigned"); + rootObject = value; + } + else + { + MutableList list = List; + if (key < list.Count) + { + object oldVal = list[key]; + if (oldVal == null) + { + list[key] = value; + } + else if (!ReferenceEquals(oldVal, value)) + { + throw new ProtoException("Reference-tracked objects cannot change reference"); + } // otherwise was the same; nothing to do + } + else if (key != list.Add(value)) + { + throw new ProtoException("Internal error; a key mismatch occurred"); + } + } + } + + private object rootObject; + internal int AddObjectKey(object value, out bool existing) + { + if (value == null) throw new ArgumentNullException(nameof(value)); + + if ((object)value == (object)rootObject) // (object) here is no-op, but should be + { // preserved even if this was typed - needs ref-check + existing = true; + return Root; + } + + string s = value as string; + BasicList list = List; + int index; + + if (s == null) + { +#if CF || PORTABLE // CF has very limited proper object ref-tracking; so instead, we'll search it the hard way + index = list.IndexOfReference(value); +#else + if (objectKeys == null) + { + objectKeys = new Dictionary(ReferenceComparer.Default); + index = -1; + } + else + { + if (!objectKeys.TryGetValue(value, out index)) index = -1; + } +#endif + } + else + { + if (stringKeys == null) + { + stringKeys = new Dictionary(); + index = -1; + } + else + { + if (!stringKeys.TryGetValue(s, out index)) index = -1; + } + } + + if (!(existing = index >= 0)) + { + index = list.Add(value); + + if (s == null) + { +#if !CF && !PORTABLE // CF can't handle the object keys very well + objectKeys.Add(value, index); +#endif + } + else + { + stringKeys.Add(s, index); + } + } + return index + 1; + } + + private int trapStartIndex; // defaults to 0 - optimization for RegisterTrappedObject + // to make it faster at seeking to find deferred-objects + + internal void RegisterTrappedObject(object value) + { + if (rootObject == null) + { + rootObject = value; + } + else + { + if (underlyingList != null) + { + for (int i = trapStartIndex; i < underlyingList.Count; i++) + { + trapStartIndex = i + 1; // things never *become* null; whether or + // not the next item is null, it will never + // need to be checked again + + if (underlyingList[i] == null) + { + underlyingList[i] = value; + break; + } + } + } + } + } + + private Dictionary stringKeys; + +#if !CF && !PORTABLE // CF lacks the ability to get a robust reference-based hash-code, so we'll do it the harder way instead + private System.Collections.Generic.Dictionary objectKeys; + private sealed class ReferenceComparer : IEqualityComparer + { + public readonly static ReferenceComparer Default = new ReferenceComparer(); + private ReferenceComparer() { 
} + + bool IEqualityComparer.Equals(object x, object y) + { + return x == y; // ref equality + } + + int IEqualityComparer.GetHashCode(object obj) + { + return System.Runtime.CompilerServices.RuntimeHelpers.GetHashCode(obj); + } + } +#endif + + internal void Clear() + { + trapStartIndex = 0; + rootObject = null; + if (underlyingList != null) underlyingList.Clear(); + if (stringKeys != null) stringKeys.Clear(); +#if !CF && !PORTABLE + if (objectKeys != null) objectKeys.Clear(); +#endif + } + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/NetObjectCache.cs.meta b/Runtime/Protobuf-net/NetObjectCache.cs.meta new file mode 100644 index 0000000..862acc0 --- /dev/null +++ b/Runtime/Protobuf-net/NetObjectCache.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a7ec59f6037764d43b3d585baf2343e2 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/PrefixStyle.cs b/Runtime/Protobuf-net/PrefixStyle.cs new file mode 100644 index 0000000..0ebef04 --- /dev/null +++ b/Runtime/Protobuf-net/PrefixStyle.cs @@ -0,0 +1,26 @@ + +namespace ProtoBuf +{ + /// + /// Specifies the type of prefix that should be applied to messages. + /// + public enum PrefixStyle + { + /// + /// No length prefix is applied to the data; the data is terminated only be the end of the stream. + /// + None = 0, + /// + /// A base-128 ("varint", the default prefix format in protobuf) length prefix is applied to the data (efficient for short messages). + /// + Base128 = 1, + /// + /// A fixed-length (little-endian) length prefix is applied to the data (useful for compatibility). + /// + Fixed32 = 2, + /// + /// A fixed-length (big-endian) length prefix is applied to the data (useful for compatibility). + /// + Fixed32BigEndian = 3 + } +} diff --git a/Runtime/Protobuf-net/PrefixStyle.cs.meta b/Runtime/Protobuf-net/PrefixStyle.cs.meta new file mode 100644 index 0000000..a955c1f --- /dev/null +++ b/Runtime/Protobuf-net/PrefixStyle.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: c6f16948bce1f2d4eb805ed31a2bb878 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoContractAttribute.cs b/Runtime/Protobuf-net/ProtoContractAttribute.cs new file mode 100644 index 0000000..e2e8054 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoContractAttribute.cs @@ -0,0 +1,175 @@ +using System; + +namespace ProtoBuf +{ + /// + /// Indicates that a type is defined for protocol-buffer serialization. + /// + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Struct | AttributeTargets.Enum | AttributeTargets.Interface, + AllowMultiple = false, Inherited = false)] + public sealed class ProtoContractAttribute : Attribute + { + /// + /// Gets or sets the defined name of the type. + /// + public string Name { get; set; } + + /// + /// Gets or sets the fist offset to use with implicit field tags; + /// only uesd if ImplicitFields is set. + /// + public int ImplicitFirstTag + { + get { return implicitFirstTag; } + set + { + if (value < 1) throw new ArgumentOutOfRangeException("ImplicitFirstTag"); + implicitFirstTag = value; + } + } + private int implicitFirstTag; + + /// + /// If specified, alternative contract markers (such as markers for XmlSerailizer or DataContractSerializer) are ignored. 
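A minimal sketch of the effect, assuming a hypothetical Order type that carries both marker families:

    using ProtoBuf;
    using System.Runtime.Serialization;

    [DataContract]
    [ProtoContract(UseProtoMembersOnly = true)]
    public class Order
    {
        [DataMember(Order = 1)]
        [ProtoMember(1)]
        public int Id;        // participates: it has a [ProtoMember] marker

        [DataMember(Order = 2)]
        public string Notes;  // ignored: only the DataContract marker is present
    }
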
+ /// + public bool UseProtoMembersOnly + { + get { return HasFlag(OPTIONS_UseProtoMembersOnly); } + set { SetFlag(OPTIONS_UseProtoMembersOnly, value); } + } + + /// + /// If specified, do NOT treat this type as a list, even if it looks like one. + /// + public bool IgnoreListHandling + { + get { return HasFlag(OPTIONS_IgnoreListHandling); } + set { SetFlag(OPTIONS_IgnoreListHandling, value); } + } + + /// + /// Gets or sets the mechanism used to automatically infer field tags + /// for members. This option should be used in advanced scenarios only. + /// Please review the important notes against the ImplicitFields enumeration. + /// + public ImplicitFields ImplicitFields { get; set; } + + /// + /// Enables/disables automatic tag generation based on the existing name / order + /// of the defined members. This option is not used for members marked + /// with ProtoMemberAttribute, as intended to provide compatibility with + /// WCF serialization. WARNING: when adding new fields you must take + /// care to increase the Order for new elements, otherwise data corruption + /// may occur. + /// + /// If not explicitly specified, the default is assumed from Serializer.GlobalOptions.InferTagFromName. + public bool InferTagFromName + { + get { return HasFlag(OPTIONS_InferTagFromName); } + set + { + SetFlag(OPTIONS_InferTagFromName, value); + SetFlag(OPTIONS_InferTagFromNameHasValue, true); + } + } + + /// + /// Has a InferTagFromName value been explicitly set? if not, the default from the type-model is assumed. + /// + internal bool InferTagFromNameHasValue + { // note that this property is accessed via reflection and should not be removed + get { return HasFlag(OPTIONS_InferTagFromNameHasValue); } + } + + /// + /// Specifies an offset to apply to [DataMember(Order=...)] markers; + /// this is useful when working with mex-generated classes that have + /// a different origin (usually 1 vs 0) than the original data-contract. + /// + /// This value is added to the Order of each member. + /// + public int DataMemberOffset { get; set; } + + /// + /// If true, the constructor for the type is bypassed during deserialization, meaning any field initializers + /// or other initialization code is skipped. + /// + public bool SkipConstructor + { + get { return HasFlag(OPTIONS_SkipConstructor); } + set { SetFlag(OPTIONS_SkipConstructor, value); } + } + + /// + /// Should this type be treated as a reference by default? 
Please also see the implications of this, + /// as recorded on ProtoMemberAttribute.AsReference + /// + public bool AsReferenceDefault + { + get { return HasFlag(OPTIONS_AsReferenceDefault); } + set + { + SetFlag(OPTIONS_AsReferenceDefault, value); + } + } + + /// + /// Indicates whether this type should always be treated as a "group" (rather than a string-prefixed sub-message) + /// + public bool IsGroup + { + get { return HasFlag(OPTIONS_IsGroup); } + set + { + SetFlag(OPTIONS_IsGroup, value); + } + } + + private bool HasFlag(ushort flag) { return (flags & flag) == flag; } + private void SetFlag(ushort flag, bool value) + { + if (value) flags |= flag; + else flags = (ushort)(flags & ~flag); + } + + private ushort flags; + + private const ushort + OPTIONS_InferTagFromName = 1, + OPTIONS_InferTagFromNameHasValue = 2, + OPTIONS_UseProtoMembersOnly = 4, + OPTIONS_SkipConstructor = 8, + OPTIONS_IgnoreListHandling = 16, + OPTIONS_AsReferenceDefault = 32, + OPTIONS_EnumPassthru = 64, + OPTIONS_EnumPassthruHasValue = 128, + OPTIONS_IsGroup = 256; + + /// + /// Applies only to enums (not to DTO classes themselves); gets or sets a value indicating that an enum should be treated directly as an int/short/etc, rather + /// than enforcing .proto enum rules. This is useful *in particul* for [Flags] enums. + /// + public bool EnumPassthru + { + get { return HasFlag(OPTIONS_EnumPassthru); } + set + { + SetFlag(OPTIONS_EnumPassthru, value); + SetFlag(OPTIONS_EnumPassthruHasValue, true); + } + } + + /// + /// Allows to define a surrogate type used for serialization/deserialization purpose. + /// + public Type Surrogate { get; set; } + + /// + /// Has a EnumPassthru value been explicitly set? + /// + internal bool EnumPassthruHasValue + { // note that this property is accessed via reflection and should not be removed + get { return HasFlag(OPTIONS_EnumPassthruHasValue); } + } + } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/ProtoContractAttribute.cs.meta b/Runtime/Protobuf-net/ProtoContractAttribute.cs.meta new file mode 100644 index 0000000..d000688 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoContractAttribute.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: e5d57dba877f0854c999b91a6514d93d +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoConverterAttribute.cs b/Runtime/Protobuf-net/ProtoConverterAttribute.cs new file mode 100644 index 0000000..b75bb80 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoConverterAttribute.cs @@ -0,0 +1,13 @@ +using System; + +namespace ProtoBuf +{ + /// + /// Indicates that a static member should be considered the same as though + /// were an implicit / explicit conversion operator; in particular, this + /// is useful for conversions that operator syntax does not allow, such as + /// to/from interface types. 
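A minimal sketch, assuming a hypothetical IPoint interface plus a surrogate type; the converter methods stand in for conversion operators, which C# cannot declare against interface types:

    using ProtoBuf;

    public interface IPoint { int X { get; } int Y { get; } }

    public class ConcretePoint : IPoint
    {
        public ConcretePoint(int x, int y) { X = x; Y = y; }
        public int X { get; }
        public int Y { get; }
    }

    [ProtoContract]
    public class PointSurrogate
    {
        [ProtoMember(1)] public int X;
        [ProtoMember(2)] public int Y;

        // Treated like an implicit conversion operator from the interface type...
        [ProtoConverter]
        public static PointSurrogate From(IPoint value)
            => value == null ? null : new PointSurrogate { X = value.X, Y = value.Y };

        // ...and back again. The surrogate would still need registering, e.g. via
        // RuntimeTypeModel.Default.Add(typeof(IPoint), false).SetSurrogate(typeof(PointSurrogate)).
        [ProtoConverter]
        public static IPoint To(PointSurrogate value)
            => value == null ? null : new ConcretePoint(value.X, value.Y);
    }
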
+ /// + [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = true)] + public class ProtoConverterAttribute : Attribute { } +} \ No newline at end of file diff --git a/Runtime/Protobuf-net/ProtoConverterAttribute.cs.meta b/Runtime/Protobuf-net/ProtoConverterAttribute.cs.meta new file mode 100644 index 0000000..323a3a4 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoConverterAttribute.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 399681000a748834f87d721feda5f459 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoEnumAttribute.cs b/Runtime/Protobuf-net/ProtoEnumAttribute.cs new file mode 100644 index 0000000..1d82645 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoEnumAttribute.cs @@ -0,0 +1,36 @@ +using System; + +namespace ProtoBuf +{ + /// + /// Used to define protocol-buffer specific behavior for + /// enumerated values. + /// + [AttributeUsage(AttributeTargets.Field, AllowMultiple = false)] + public sealed class ProtoEnumAttribute : Attribute + { + /// + /// Gets or sets the specific value to use for this enum during serialization. + /// + public int Value + { + get { return enumValue; } + set { this.enumValue = value; hasValue = true; } + } + + /// + /// Indicates whether this instance has a customised value mapping + /// + /// true if a specific value is set + public bool HasValue() => hasValue; + + private bool hasValue; + private int enumValue; + + /// + /// Gets or sets the defined name of the enum, as used in .proto + /// (this name is not used during serialization). + /// + public string Name { get; set; } + } +} diff --git a/Runtime/Protobuf-net/ProtoEnumAttribute.cs.meta b/Runtime/Protobuf-net/ProtoEnumAttribute.cs.meta new file mode 100644 index 0000000..a5cada9 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoEnumAttribute.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 3b3e030ed91e74b49b87bb0cd9acf139 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoException.cs b/Runtime/Protobuf-net/ProtoException.cs new file mode 100644 index 0000000..f502527 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoException.cs @@ -0,0 +1,30 @@ +using System; + +#if PLAT_BINARYFORMATTER && !(COREFX || PROFILE259) +using System.Runtime.Serialization; +#endif +namespace ProtoBuf +{ + /// + /// Indicates an error during serialization/deserialization of a proto stream. + /// +#if PLAT_BINARYFORMATTER && !(COREFX || PROFILE259) + [Serializable] +#endif + public class ProtoException : Exception + { + /// Creates a new ProtoException instance. + public ProtoException() { } + + /// Creates a new ProtoException instance. + public ProtoException(string message) : base(message) { } + + /// Creates a new ProtoException instance. + public ProtoException(string message, Exception innerException) : base(message, innerException) { } + +#if PLAT_BINARYFORMATTER && !(COREFX || PROFILE259) + /// Creates a new ProtoException instance. 
+ protected ProtoException(SerializationInfo info, StreamingContext context) : base(info, context) { } +#endif + } +} diff --git a/Runtime/Protobuf-net/ProtoException.cs.meta b/Runtime/Protobuf-net/ProtoException.cs.meta new file mode 100644 index 0000000..28099e5 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoException.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8629683d41766534fa00bcb5d1a324e0 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoIgnoreAttribute.cs b/Runtime/Protobuf-net/ProtoIgnoreAttribute.cs new file mode 100644 index 0000000..775674e --- /dev/null +++ b/Runtime/Protobuf-net/ProtoIgnoreAttribute.cs @@ -0,0 +1,40 @@ +using System; + +namespace ProtoBuf +{ + /// + /// Indicates that a member should be excluded from serialization; this + /// is only normally used when using implict fields. + /// + [AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, + AllowMultiple = false, Inherited = true)] + public class ProtoIgnoreAttribute : Attribute { } + + /// + /// Indicates that a member should be excluded from serialization; this + /// is only normally used when using implict fields. This allows + /// ProtoIgnoreAttribute usage + /// even for partial classes where the individual members are not + /// under direct control. + /// + [AttributeUsage(AttributeTargets.Class, + AllowMultiple = true, Inherited = false)] + public sealed class ProtoPartialIgnoreAttribute : ProtoIgnoreAttribute + { + /// + /// Creates a new ProtoPartialIgnoreAttribute instance. + /// + /// Specifies the member to be ignored. + public ProtoPartialIgnoreAttribute(string memberName) + : base() + { + if (string.IsNullOrEmpty(memberName)) throw new ArgumentNullException(nameof(memberName)); + + MemberName = memberName; + } + /// + /// The name of the member to be ignored. + /// + public string MemberName { get; } + } +} diff --git a/Runtime/Protobuf-net/ProtoIgnoreAttribute.cs.meta b/Runtime/Protobuf-net/ProtoIgnoreAttribute.cs.meta new file mode 100644 index 0000000..00afe26 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoIgnoreAttribute.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b012b09d39a7c2445aba79ffee82b117 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoIncludeAttribute.cs b/Runtime/Protobuf-net/ProtoIncludeAttribute.cs new file mode 100644 index 0000000..bb83ef7 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoIncludeAttribute.cs @@ -0,0 +1,60 @@ +using System; +using System.ComponentModel; + +using ProtoBuf.Meta; + +namespace ProtoBuf +{ + /// + /// Indicates the known-types to support for an individual + /// message. This serializes each level in the hierarchy as + /// a nested message to retain wire-compatibility with + /// other protocol-buffer implementations. + /// + [AttributeUsage(AttributeTargets.Class | AttributeTargets.Interface, AllowMultiple = true, Inherited = false)] + public sealed class ProtoIncludeAttribute : Attribute + { + /// + /// Creates a new instance of the ProtoIncludeAttribute. + /// + /// The unique index (within the type) that will identify this data. + /// The additional type to serialize/deserialize. + public ProtoIncludeAttribute(int tag, Type knownType) + : this(tag, knownType == null ? 
"" : knownType.AssemblyQualifiedName) { } + + /// + /// Creates a new instance of the ProtoIncludeAttribute. + /// + /// The unique index (within the type) that will identify this data. + /// The additional type to serialize/deserialize. + public ProtoIncludeAttribute(int tag, string knownTypeName) + { + if (tag <= 0) throw new ArgumentOutOfRangeException(nameof(tag), "Tags must be positive integers"); + if (string.IsNullOrEmpty(knownTypeName)) throw new ArgumentNullException(nameof(knownTypeName), "Known type cannot be blank"); + Tag = tag; + KnownTypeName = knownTypeName; + } + + /// + /// Gets the unique index (within the type) that will identify this data. + /// + public int Tag { get; } + + /// + /// Gets the additional type to serialize/deserialize. + /// + public string KnownTypeName { get; } + + /// + /// Gets the additional type to serialize/deserialize. + /// + public Type KnownType => TypeModel.ResolveKnownType(KnownTypeName, null, null); + + /// + /// Specifies whether the inherited sype's sub-message should be + /// written with a length-prefix (default), or with group markers. + /// + [DefaultValue(DataFormat.Default)] + public DataFormat DataFormat { get; set; } = DataFormat.Default; + } +} diff --git a/Runtime/Protobuf-net/ProtoIncludeAttribute.cs.meta b/Runtime/Protobuf-net/ProtoIncludeAttribute.cs.meta new file mode 100644 index 0000000..edebcb5 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoIncludeAttribute.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 40d89f2230d5a4f4badf122df4ed9fae +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoMapAttribute.cs b/Runtime/Protobuf-net/ProtoMapAttribute.cs new file mode 100644 index 0000000..e85441a --- /dev/null +++ b/Runtime/Protobuf-net/ProtoMapAttribute.cs @@ -0,0 +1,29 @@ +using System; + +namespace ProtoBuf +{ + /// + /// Controls the formatting of elements in a dictionary, and indicates that + /// "map" rules should be used: duplicates *replace* earlier values, rather + /// than throwing an exception + /// + [AttributeUsage(AttributeTargets.Property | AttributeTargets.Field)] + public class ProtoMapAttribute : Attribute + { + /// + /// Describes the data-format used to store the key + /// + public DataFormat KeyFormat { get; set; } + /// + /// Describes the data-format used to store the value + /// + public DataFormat ValueFormat { get; set; } + + /// + /// Disables "map" handling; dictionaries will use ".Add(key,value)" instead of "[key] = value", + /// which means duplicate keys will cause an exception (instead of retaining the final value); if + /// a proto schema is emitted, it will be produced using "repeated" instead of "map" + /// + public bool DisableMap { get; set; } + } +} diff --git a/Runtime/Protobuf-net/ProtoMapAttribute.cs.meta b/Runtime/Protobuf-net/ProtoMapAttribute.cs.meta new file mode 100644 index 0000000..cf765ae --- /dev/null +++ b/Runtime/Protobuf-net/ProtoMapAttribute.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 2d41a983b561e9043a8ce693aeb9c835 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoMemberAttribute.cs b/Runtime/Protobuf-net/ProtoMemberAttribute.cs new file mode 100644 index 0000000..e5ab896 --- /dev/null +++ 
b/Runtime/Protobuf-net/ProtoMemberAttribute.cs @@ -0,0 +1,228 @@ +using System; +using System.Reflection; + +namespace ProtoBuf +{ + /// + /// Declares a member to be used in protocol-buffer serialization, using + /// the given Tag. A DataFormat may be used to optimise the serialization + /// format (for instance, using zigzag encoding for negative numbers, or + /// fixed-length encoding for large values. + /// + [AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, + AllowMultiple = false, Inherited = true)] + public class ProtoMemberAttribute : Attribute + , IComparable + , IComparable + + { + /// + /// Compare with another ProtoMemberAttribute for sorting purposes + /// + public int CompareTo(object other) => CompareTo(other as ProtoMemberAttribute); + /// + /// Compare with another ProtoMemberAttribute for sorting purposes + /// + public int CompareTo(ProtoMemberAttribute other) + { + if (other == null) return -1; + if ((object)this == (object)other) return 0; + int result = this.tag.CompareTo(other.tag); + if (result == 0) result = string.CompareOrdinal(this.name, other.name); + return result; + } + + /// + /// Creates a new ProtoMemberAttribute instance. + /// + /// Specifies the unique tag used to identify this member within the type. + public ProtoMemberAttribute(int tag) : this(tag, false) + { } + + internal ProtoMemberAttribute(int tag, bool forced) + { + if (tag <= 0 && !forced) throw new ArgumentOutOfRangeException(nameof(tag)); + this.tag = tag; + } + +#if !NO_RUNTIME + internal MemberInfo Member, BackingMember; + internal bool TagIsPinned; +#endif + /// + /// Gets or sets the original name defined in the .proto; not used + /// during serialization. + /// + public string Name { get { return name; } set { name = value; } } + private string name; + + /// + /// Gets or sets the data-format to be used when encoding this value. + /// + public DataFormat DataFormat { get { return dataFormat; } set { dataFormat = value; } } + private DataFormat dataFormat; + + /// + /// Gets the unique tag used to identify this member within the type. + /// + public int Tag { get { return tag; } } + private int tag; + internal void Rebase(int tag) { this.tag = tag; } + + /// + /// Gets or sets a value indicating whether this member is mandatory. + /// + public bool IsRequired + { + get { return (options & MemberSerializationOptions.Required) == MemberSerializationOptions.Required; } + set + { + if (value) options |= MemberSerializationOptions.Required; + else options &= ~MemberSerializationOptions.Required; + } + } + + /// + /// Gets a value indicating whether this member is packed. + /// This option only applies to list/array data of primitive types (int, double, etc). + /// + public bool IsPacked + { + get { return (options & MemberSerializationOptions.Packed) == MemberSerializationOptions.Packed; } + set + { + if (value) options |= MemberSerializationOptions.Packed; + else options &= ~MemberSerializationOptions.Packed; + } + } + + /// + /// Indicates whether this field should *repace* existing values (the default is false, meaning *append*). + /// This option only applies to list/array data. + /// + public bool OverwriteList + { + get { return (options & MemberSerializationOptions.OverwriteList) == MemberSerializationOptions.OverwriteList; } + set + { + if (value) options |= MemberSerializationOptions.OverwriteList; + else options &= ~MemberSerializationOptions.OverwriteList; + } + } + + /// + /// Enables full object-tracking/full-graph support. 
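A minimal sketch, assuming hypothetical Author/Book types that share a single Author instance:

    using ProtoBuf;

    [ProtoContract]
    public class Author
    {
        [ProtoMember(1)] public string Name;
    }

    [ProtoContract]
    public class Book
    {
        [ProtoMember(1)] public string Title;

        // With AsReference, two books pointing at the same Author serialize the
        // object once and restore the shared reference on deserialization.
        [ProtoMember(2, AsReference = true)] public Author Author;
    }
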
+ /// + public bool AsReference + { + get { return (options & MemberSerializationOptions.AsReference) == MemberSerializationOptions.AsReference; } + set + { + if (value) options |= MemberSerializationOptions.AsReference; + else options &= ~MemberSerializationOptions.AsReference; + + options |= MemberSerializationOptions.AsReferenceHasValue; + } + } + + internal bool AsReferenceHasValue + { + get { return (options & MemberSerializationOptions.AsReferenceHasValue) == MemberSerializationOptions.AsReferenceHasValue; } + set + { + if (value) options |= MemberSerializationOptions.AsReferenceHasValue; + else options &= ~MemberSerializationOptions.AsReferenceHasValue; + } + } + + /// + /// Embeds the type information into the stream, allowing usage with types not known in advance. + /// + public bool DynamicType + { + get { return (options & MemberSerializationOptions.DynamicType) == MemberSerializationOptions.DynamicType; } + set + { + if (value) options |= MemberSerializationOptions.DynamicType; + else options &= ~MemberSerializationOptions.DynamicType; + } + } + + /// + /// Gets or sets a value indicating whether this member is packed (lists/arrays). + /// + public MemberSerializationOptions Options { get { return options; } set { options = value; } } + private MemberSerializationOptions options; + + + } + + /// + /// Additional (optional) settings that control serialization of members + /// + [Flags] + public enum MemberSerializationOptions + { + /// + /// Default; no additional options + /// + None = 0, + /// + /// Indicates that repeated elements should use packed (length-prefixed) encoding + /// + Packed = 1, + /// + /// Indicates that the given item is required + /// + Required = 2, + /// + /// Enables full object-tracking/full-graph support + /// + AsReference = 4, + /// + /// Embeds the type information into the stream, allowing usage with types not known in advance + /// + DynamicType = 8, + /// + /// Indicates whether this field should *repace* existing values (the default is false, meaning *append*). + /// This option only applies to list/array data. + /// + OverwriteList = 16, + /// + /// Determines whether the types AsReferenceDefault value is used, or whether this member's AsReference should be used + /// + AsReferenceHasValue = 32 + } + + /// + /// Declares a member to be used in protocol-buffer serialization, using + /// the given Tag and MemberName. This allows ProtoMemberAttribute usage + /// even for partial classes where the individual members are not + /// under direct control. + /// A DataFormat may be used to optimise the serialization + /// format (for instance, using zigzag encoding for negative numbers, or + /// fixed-length encoding for large values. + /// + [AttributeUsage(AttributeTargets.Class, + AllowMultiple = true, Inherited = false)] + public sealed class ProtoPartialMemberAttribute : ProtoMemberAttribute + { + /// + /// Creates a new ProtoMemberAttribute instance. + /// + /// Specifies the unique tag used to identify this member within the type. + /// Specifies the member to be serialized. + public ProtoPartialMemberAttribute(int tag, string memberName) + : base(tag) + { +#if !NO_RUNTIME + if (string.IsNullOrEmpty(memberName)) throw new ArgumentNullException(nameof(memberName)); +#endif + this.MemberName = memberName; + } + /// + /// The name of the member to be serialized. 
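A minimal sketch of the partial-class usage, assuming a designer-generated type whose member declarations cannot be annotated directly:

    using ProtoBuf;

    // Hand-written half: tags are attached at class level by member name.
    [ProtoContract]
    [ProtoPartialMember(1, "Id")]
    [ProtoPartialMember(2, "Name")]
    public partial class GeneratedCustomer { }

    // Generated half (not under our control), left untouched.
    public partial class GeneratedCustomer
    {
        public int Id { get; set; }
        public string Name { get; set; }
    }
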
+ /// + public string MemberName { get; private set; } + } +} diff --git a/Runtime/Protobuf-net/ProtoMemberAttribute.cs.meta b/Runtime/Protobuf-net/ProtoMemberAttribute.cs.meta new file mode 100644 index 0000000..2f3dfc9 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoMemberAttribute.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 262c0823543b1b3499e2b67ca22f4e62 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoReader.cs b/Runtime/Protobuf-net/ProtoReader.cs new file mode 100644 index 0000000..3ea9bf9 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoReader.cs @@ -0,0 +1,1444 @@ + +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using ProtoBuf.Meta; + +namespace ProtoBuf +{ + /// + /// A stateful reader, used to read a protobuf stream. Typical usage would be (sequentially) to call + /// ReadFieldHeader and (after matching the field) an appropriate Read* method. + /// + public sealed class ProtoReader : IDisposable + { + Stream source; + byte[] ioBuffer; + TypeModel model; + int fieldNumber, depth, ioIndex, available; + long position64, blockEnd64, dataRemaining64; + WireType wireType; + bool isFixedLength, internStrings; + private NetObjectCache netCache; + + // this is how many outstanding objects do not currently have + // values for the purposes of reference tracking; we'll default + // to just trapping the root object + // note: objects are trapped (the ref and key mapped) via NoteObject + uint trapCount; // uint is so we can use beq/bne more efficiently than bgt + + /// + /// Gets the number of the field being processed. + /// + public int FieldNumber => fieldNumber; + + /// + /// Indicates the underlying proto serialization format on the wire. + /// + public WireType WireType => wireType; + + /// + /// Creates a new reader against a stream + /// + /// The source stream + /// The model to use for serialization; this can be null, but this will impair the ability to deserialize sub-objects + /// Additional context about this serialization operation + [Obsolete("Please use ProtoReader.Create; this API may be removed in a future version", error: false)] + public ProtoReader(Stream source, TypeModel model, SerializationContext context) + { + + Init(this, source, model, context, TO_EOF); + } + + internal const long TO_EOF = -1; + + /// + /// Gets / sets a flag indicating whether strings should be checked for repetition; if + /// true, any repeated UTF-8 byte sequence will result in the same String instance, rather + /// than a second instance of the same string. Enabled by default. Note that this uses + /// a custom interner - the system-wide string interner is not used. 
+ /// + public bool InternStrings { get { return internStrings; } set { internStrings = value; } } + + /// + /// Creates a new reader against a stream + /// + /// The source stream + /// The model to use for serialization; this can be null, but this will impair the ability to deserialize sub-objects + /// Additional context about this serialization operation + /// The number of bytes to read, or -1 to read until the end of the stream + [Obsolete("Please use ProtoReader.Create; this API may be removed in a future version", error: false)] + public ProtoReader(Stream source, TypeModel model, SerializationContext context, int length) + { + Init(this, source, model, context, length); + } + + /// + /// Creates a new reader against a stream + /// + /// The source stream + /// The model to use for serialization; this can be null, but this will impair the ability to deserialize sub-objects + /// Additional context about this serialization operation + /// The number of bytes to read, or -1 to read until the end of the stream + [Obsolete("Please use ProtoReader.Create; this API may be removed in a future version", error: false)] + public ProtoReader(Stream source, TypeModel model, SerializationContext context, long length) + { + Init(this, source, model, context, length); + } + + private static void Init(ProtoReader reader, Stream source, TypeModel model, SerializationContext context, long length) + { + if (source == null) throw new ArgumentNullException(nameof(source)); + if (!source.CanRead) throw new ArgumentException("Cannot read from stream", nameof(source)); + reader.source = source; + reader.ioBuffer = BufferPool.GetBuffer(); + reader.model = model; + bool isFixedLength = length >= 0; + reader.isFixedLength = isFixedLength; + reader.dataRemaining64 = isFixedLength ? length : 0; + + if (context == null) { context = SerializationContext.Default; } + else { context.Freeze(); } + reader.context = context; + reader.position64 = 0; + reader.available = reader.depth = reader.fieldNumber = reader.ioIndex = 0; + reader.blockEnd64 = long.MaxValue; + reader.internStrings = RuntimeTypeModel.Default.InternStrings; + reader.wireType = WireType.None; + reader.trapCount = 1; + if (reader.netCache == null) reader.netCache = new NetObjectCache(); + } + + private SerializationContext context; + + /// + /// Addition information about this deserialization operation. + /// + public SerializationContext Context => context; + + /// + /// Releases resources used by the reader, but importantly does not Dispose the + /// underlying stream; in many typical use-cases the stream is used for different + /// processes, so it is assumed that the consumer will Dispose their stream separately. 
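A minimal sketch of that ownership contract; the ProtoReader.Create factory referenced by the [Obsolete] constructors is assumed here to take (stream, model, context, length):

    using System.IO;
    using ProtoBuf;
    using ProtoBuf.Meta;

    public static class ReaderSketch
    {
        public static void WalkFields(byte[] payload)
        {
            using (var ms = new MemoryStream(payload))
            {
                using (var reader = ProtoReader.Create(ms, RuntimeTypeModel.Default, null, payload.Length))
                {
                    reader.InternStrings = false; // opt out of the per-reader string interner
                    while (reader.ReadFieldHeader() > 0)
                    {
                        reader.SkipField(); // walk the payload without materialising values
                    }
                }
                // Disposing the reader did not close the stream; it is still usable here.
                ms.Position = 0;
            }
        }
    }
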
+ /// + public void Dispose() + { + // importantly, this does **not** own the stream, and does not dispose it + source = null; + model = null; + BufferPool.ReleaseBufferToPool(ref ioBuffer); + if (stringInterner != null) + { + stringInterner.Clear(); + stringInterner = null; + } + if (netCache != null) netCache.Clear(); + } + internal int TryReadUInt32VariantWithoutMoving(bool trimNegative, out uint value) + { + if (available < 10) Ensure(10, false); + if (available == 0) + { + value = 0; + return 0; + } + int readPos = ioIndex; + value = ioBuffer[readPos++]; + if ((value & 0x80) == 0) return 1; + value &= 0x7F; + if (available == 1) throw EoF(this); + + uint chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 7; + if ((chunk & 0x80) == 0) return 2; + if (available == 2) throw EoF(this); + + chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 14; + if ((chunk & 0x80) == 0) return 3; + if (available == 3) throw EoF(this); + + chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 21; + if ((chunk & 0x80) == 0) return 4; + if (available == 4) throw EoF(this); + + chunk = ioBuffer[readPos]; + value |= chunk << 28; // can only use 4 bits from this chunk + if ((chunk & 0xF0) == 0) return 5; + + if (trimNegative // allow for -ve values + && (chunk & 0xF0) == 0xF0 + && available >= 10 + && ioBuffer[++readPos] == 0xFF + && ioBuffer[++readPos] == 0xFF + && ioBuffer[++readPos] == 0xFF + && ioBuffer[++readPos] == 0xFF + && ioBuffer[++readPos] == 0x01) + { + return 10; + } + throw AddErrorData(new OverflowException(), this); + } + + private uint ReadUInt32Variant(bool trimNegative) + { + int read = TryReadUInt32VariantWithoutMoving(trimNegative, out uint value); + if (read > 0) + { + ioIndex += read; + available -= read; + position64 += read; + return value; + } + throw EoF(this); + } + + private bool TryReadUInt32Variant(out uint value) + { + int read = TryReadUInt32VariantWithoutMoving(false, out value); + if (read > 0) + { + ioIndex += read; + available -= read; + position64 += read; + return true; + } + return false; + } + + /// + /// Reads an unsigned 32-bit integer from the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + public uint ReadUInt32() + { + switch (wireType) + { + case WireType.Variant: + return ReadUInt32Variant(false); + case WireType.Fixed32: + if (available < 4) Ensure(4, true); + position64 += 4; + available -= 4; + return ((uint)ioBuffer[ioIndex++]) + | (((uint)ioBuffer[ioIndex++]) << 8) + | (((uint)ioBuffer[ioIndex++]) << 16) + | (((uint)ioBuffer[ioIndex++]) << 24); + case WireType.Fixed64: + ulong val = ReadUInt64(); + checked { return (uint)val; } + default: + throw CreateWireTypeException(); + } + } + + /// + /// Returns the position of the current reader (note that this is not necessarily the same as the position + /// in the underlying stream, if multiple readers are used on the same stream) + /// + public int Position { get { return checked((int)position64); } } + + /// + /// Returns the position of the current reader (note that this is not necessarily the same as the position + /// in the underlying stream, if multiple readers are used on the same stream) + /// + public long LongPosition { get { return position64; } } + internal void Ensure(int count, bool strict) + { + Helpers.DebugAssert(available <= count, "Asking for data without checking first"); + if (count > ioBuffer.Length) + { + BufferPool.ResizeAndFlushLeft(ref ioBuffer, count, ioIndex, available); + ioIndex = 0; + } + else if (ioIndex + count >= ioBuffer.Length) + { + // need to 
shift the buffer data to the left to make space + Buffer.BlockCopy(ioBuffer, ioIndex, ioBuffer, 0, available); + ioIndex = 0; + } + count -= available; + int writePos = ioIndex + available, bytesRead; + int canRead = ioBuffer.Length - writePos; + if (isFixedLength) + { // throttle it if needed + if (dataRemaining64 < canRead) canRead = (int)dataRemaining64; + } + while (count > 0 && canRead > 0 && (bytesRead = source.Read(ioBuffer, writePos, canRead)) > 0) + { + available += bytesRead; + count -= bytesRead; + canRead -= bytesRead; + writePos += bytesRead; + if (isFixedLength) { dataRemaining64 -= bytesRead; } + } + if (strict && count > 0) + { + throw EoF(this); + } + + } + /// + /// Reads a signed 16-bit integer from the stream: Variant, Fixed32, Fixed64, SignedVariant + /// + public short ReadInt16() + { + checked { return (short)ReadInt32(); } + } + /// + /// Reads an unsigned 16-bit integer from the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + public ushort ReadUInt16() + { + checked { return (ushort)ReadUInt32(); } + } + + /// + /// Reads an unsigned 8-bit integer from the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + public byte ReadByte() + { + checked { return (byte)ReadUInt32(); } + } + + /// + /// Reads a signed 8-bit integer from the stream; supported wire-types: Variant, Fixed32, Fixed64, SignedVariant + /// + public sbyte ReadSByte() + { + checked { return (sbyte)ReadInt32(); } + } + + /// + /// Reads a signed 32-bit integer from the stream; supported wire-types: Variant, Fixed32, Fixed64, SignedVariant + /// + public int ReadInt32() + { + switch (wireType) + { + case WireType.Variant: + return (int)ReadUInt32Variant(true); + case WireType.Fixed32: + if (available < 4) Ensure(4, true); + position64 += 4; + available -= 4; + return ((int)ioBuffer[ioIndex++]) + | (((int)ioBuffer[ioIndex++]) << 8) + | (((int)ioBuffer[ioIndex++]) << 16) + | (((int)ioBuffer[ioIndex++]) << 24); + case WireType.Fixed64: + long l = ReadInt64(); + checked { return (int)l; } + case WireType.SignedVariant: + return Zag(ReadUInt32Variant(true)); + default: + throw CreateWireTypeException(); + } + } + private const long Int64Msb = ((long)1) << 63; + private const int Int32Msb = ((int)1) << 31; + private static int Zag(uint ziggedValue) + { + int value = (int)ziggedValue; + return (-(value & 0x01)) ^ ((value >> 1) & ~ProtoReader.Int32Msb); + } + + private static long Zag(ulong ziggedValue) + { + long value = (long)ziggedValue; + return (-(value & 0x01L)) ^ ((value >> 1) & ~ProtoReader.Int64Msb); + } + /// + /// Reads a signed 64-bit integer from the stream; supported wire-types: Variant, Fixed32, Fixed64, SignedVariant + /// + public long ReadInt64() + { + switch (wireType) + { + case WireType.Variant: + return (long)ReadUInt64Variant(); + case WireType.Fixed32: + return ReadInt32(); + case WireType.Fixed64: + if (available < 8) Ensure(8, true); + position64 += 8; + available -= 8; + +#if NETCOREAPP2_1 + var result = System.Buffers.Binary.BinaryPrimitives.ReadInt64LittleEndian(ioBuffer.AsSpan(ioIndex, 8)); + + ioIndex+= 8; + + return result; +#else + return ((long)ioBuffer[ioIndex++]) + | (((long)ioBuffer[ioIndex++]) << 8) + | (((long)ioBuffer[ioIndex++]) << 16) + | (((long)ioBuffer[ioIndex++]) << 24) + | (((long)ioBuffer[ioIndex++]) << 32) + | (((long)ioBuffer[ioIndex++]) << 40) + | (((long)ioBuffer[ioIndex++]) << 48) + | (((long)ioBuffer[ioIndex++]) << 56); +#endif + case WireType.SignedVariant: + return Zag(ReadUInt64Variant()); + default: + throw 
CreateWireTypeException(); + } + } + + private int TryReadUInt64VariantWithoutMoving(out ulong value) + { + if (available < 10) Ensure(10, false); + if (available == 0) + { + value = 0; + return 0; + } + int readPos = ioIndex; + value = ioBuffer[readPos++]; + if ((value & 0x80) == 0) return 1; + value &= 0x7F; + if (available == 1) throw EoF(this); + + ulong chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 7; + if ((chunk & 0x80) == 0) return 2; + if (available == 2) throw EoF(this); + + chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 14; + if ((chunk & 0x80) == 0) return 3; + if (available == 3) throw EoF(this); + + chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 21; + if ((chunk & 0x80) == 0) return 4; + if (available == 4) throw EoF(this); + + chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 28; + if ((chunk & 0x80) == 0) return 5; + if (available == 5) throw EoF(this); + + chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 35; + if ((chunk & 0x80) == 0) return 6; + if (available == 6) throw EoF(this); + + chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 42; + if ((chunk & 0x80) == 0) return 7; + if (available == 7) throw EoF(this); + + + chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 49; + if ((chunk & 0x80) == 0) return 8; + if (available == 8) throw EoF(this); + + chunk = ioBuffer[readPos++]; + value |= (chunk & 0x7F) << 56; + if ((chunk & 0x80) == 0) return 9; + if (available == 9) throw EoF(this); + + chunk = ioBuffer[readPos]; + value |= chunk << 63; // can only use 1 bit from this chunk + + if ((chunk & ~(ulong)0x01) != 0) throw AddErrorData(new OverflowException(), this); + return 10; + } + + private ulong ReadUInt64Variant() + { + int read = TryReadUInt64VariantWithoutMoving(out ulong value); + if (read > 0) + { + ioIndex += read; + available -= read; + position64 += read; + return value; + } + throw EoF(this); + } + + private Dictionary stringInterner; + private string Intern(string value) + { + if (value == null) return null; + if (value.Length == 0) return ""; + if (stringInterner == null) + { + stringInterner = new Dictionary + { + { value, value } + }; + } + else if (stringInterner.TryGetValue(value, out string found)) + { + value = found; + } + else + { + stringInterner.Add(value, value); + } + return value; + } + +#if COREFX + static readonly Encoding encoding = Encoding.UTF8; +#else + static readonly UTF8Encoding encoding = new UTF8Encoding(); +#endif + /// + /// Reads a string from the stream (using UTF8); supported wire-types: String + /// + public string ReadString() + { + if (wireType == WireType.String) + { + int bytes = (int)ReadUInt32Variant(false); + if (bytes == 0) return ""; + if (bytes < 0) ThrowInvalidLength(bytes); + if (available < bytes) Ensure(bytes, true); + + string s = encoding.GetString(ioBuffer, ioIndex, bytes); + + if (internStrings) { s = Intern(s); } + available -= bytes; + position64 += bytes; + ioIndex += bytes; + return s; + } + throw CreateWireTypeException(); + } + /// + /// Throws an exception indication that the given value cannot be mapped to an enum. + /// + public void ThrowEnumException(Type type, int value) + { + string desc = type == null ? 
"" : type.FullName; + throw AddErrorData(new ProtoException("No " + desc + " enum is mapped to the wire-value " + value.ToString()), this); + } + + private void ThrowInvalidLength(long length) + { + throw AddErrorData(new InvalidOperationException("Invalid length: " + length.ToString()), this); + } + + private Exception CreateWireTypeException() + { + return CreateException("Invalid wire-type; this usually means you have over-written a file without truncating or setting the length; see https://stackoverflow.com/q/2152978/23354"); + } + + private Exception CreateException(string message) + { + return AddErrorData(new ProtoException(message), this); + } + /// + /// Reads a double-precision number from the stream; supported wire-types: Fixed32, Fixed64 + /// + public +#if !FEAT_SAFE + unsafe +#endif + double ReadDouble() + { + switch (wireType) + { + case WireType.Fixed32: + return ReadSingle(); + case WireType.Fixed64: + long value = ReadInt64(); +#if FEAT_SAFE + return BitConverter.ToDouble(BitConverter.GetBytes(value), 0); +#else + return *(double*)&value; +#endif + default: + throw CreateWireTypeException(); + } + } + + /// + /// Reads (merges) a sub-message from the stream, internally calling StartSubItem and EndSubItem, and (in between) + /// parsing the message in accordance with the model associated with the reader + /// + public static object ReadObject(object value, int key, ProtoReader reader) + { + return ReadTypedObject(value, key, reader, null); + } + + internal static object ReadTypedObject(object value, int key, ProtoReader reader, Type type) + { + if (reader.model == null) + { + throw AddErrorData(new InvalidOperationException("Cannot deserialize sub-objects unless a model is provided"), reader); + } + SubItemToken token = ProtoReader.StartSubItem(reader); + if (key >= 0) + { + value = reader.model.Deserialize(key, value, reader); + } + else if (type != null && reader.model.TryDeserializeAuxiliaryType(reader, DataFormat.Default, Serializer.ListItemTag, type, ref value, true, false, true, false, null)) + { + // ok + } + else + { + TypeModel.ThrowUnexpectedType(type); + } + ProtoReader.EndSubItem(token, reader); + return value; + } + + /// + /// Makes the end of consuming a nested message in the stream; the stream must be either at the correct EndGroup + /// marker, or all fields of the sub-message must have been consumed (in either case, this means ReadFieldHeader + /// should return zero) + /// + public static void EndSubItem(SubItemToken token, ProtoReader reader) + { + if (reader == null) throw new ArgumentNullException("reader"); + long value64 = token.value64; + switch (reader.wireType) + { + case WireType.EndGroup: + if (value64 >= 0) throw AddErrorData(new ArgumentException("token"), reader); + if (-(int)value64 != reader.fieldNumber) throw reader.CreateException("Wrong group was ended"); // wrong group ended! 
+ reader.wireType = WireType.None; // this releases ReadFieldHeader + reader.depth--; + break; + // case WireType.None: // TODO reinstate once reads reset the wire-type + default: + if (value64 < reader.position64) throw reader.CreateException($"Sub-message not read entirely; expected {value64}, was {reader.position64}"); + if (reader.blockEnd64 != reader.position64 && reader.blockEnd64 != long.MaxValue) + { + throw reader.CreateException("Sub-message not read correctly"); + } + reader.blockEnd64 = value64; + reader.depth--; + break; + /*default: + throw reader.BorkedIt(); */ + } + } + + /// + /// Begins consuming a nested message in the stream; supported wire-types: StartGroup, String + /// + /// The token returned must be help and used when callining EndSubItem + public static SubItemToken StartSubItem(ProtoReader reader) + { + if (reader == null) throw new ArgumentNullException("reader"); + switch (reader.wireType) + { + case WireType.StartGroup: + reader.wireType = WireType.None; // to prevent glitches from double-calling + reader.depth++; + return new SubItemToken((long)(-reader.fieldNumber)); + case WireType.String: + long len = (long)reader.ReadUInt64Variant(); + if (len < 0) reader.ThrowInvalidLength(len); + long lastEnd = reader.blockEnd64; + reader.blockEnd64 = reader.position64 + len; + reader.depth++; + return new SubItemToken(lastEnd); + default: + throw reader.CreateWireTypeException(); // throws + } + } + + /// + /// Reads a field header from the stream, setting the wire-type and retuning the field number. If no + /// more fields are available, then 0 is returned. This methods respects sub-messages. + /// + public int ReadFieldHeader() + { + // at the end of a group the caller must call EndSubItem to release the + // reader (which moves the status to Error, since ReadFieldHeader must + // then be called) + if (blockEnd64 <= position64 || wireType == WireType.EndGroup) { return 0; } + + if (TryReadUInt32Variant(out uint tag) && tag != 0) + { + wireType = (WireType)(tag & 7); + fieldNumber = (int)(tag >> 3); + if (fieldNumber < 1) throw new ProtoException("Invalid field in source data: " + fieldNumber.ToString()); + } + else + { + wireType = WireType.None; + fieldNumber = 0; + } + if (wireType == ProtoBuf.WireType.EndGroup) + { + if (depth > 0) return 0; // spoof an end, but note we still set the field-number + throw new ProtoException("Unexpected end-group in source data; this usually means the source data is corrupt"); + } + return fieldNumber; + } + /// + /// Looks ahead to see whether the next field in the stream is what we expect + /// (typically; what we've just finished reading - for example ot read successive list items) + /// + public bool TryReadFieldHeader(int field) + { + // check for virtual end of stream + if (blockEnd64 <= position64 || wireType == WireType.EndGroup) { return false; } + + int read = TryReadUInt32VariantWithoutMoving(false, out uint tag); + WireType tmpWireType; // need to catch this to exclude (early) any "end group" tokens + if (read > 0 && ((int)tag >> 3) == field + && (tmpWireType = (WireType)(tag & 7)) != WireType.EndGroup) + { + wireType = tmpWireType; + fieldNumber = field; + position64 += read; + ioIndex += read; + available -= read; + return true; + } + return false; + } + + /// + /// Get the TypeModel associated with this reader + /// + public TypeModel Model { get { return model; } } + + /// + /// Compares the streams current wire-type to the hinted wire-type, updating the reader if necessary; for example, + /// a Variant may be 
updated to SignedVariant. If the hinted wire-type is unrelated then no change is made. + /// + public void Hint(WireType wireType) + { + if (this.wireType == wireType) { } // fine; everything as we expect + else if (((int)wireType & 7) == (int)this.wireType) + { // the underling type is a match; we're customising it with an extension + this.wireType = wireType; + } + // note no error here; we're OK about using alternative data + } + + /// + /// Verifies that the stream's current wire-type is as expected, or a specialized sub-type (for example, + /// SignedVariant) - in which case the current wire-type is updated. Otherwise an exception is thrown. + /// + public void Assert(WireType wireType) + { + if (this.wireType == wireType) { } // fine; everything as we expect + else if (((int)wireType & 7) == (int)this.wireType) + { // the underling type is a match; we're customising it with an extension + this.wireType = wireType; + } + else + { // nope; that is *not* what we were expecting! + throw CreateWireTypeException(); + } + } + + /// + /// Discards the data for the current field. + /// + public void SkipField() + { + switch (wireType) + { + case WireType.Fixed32: + if (available < 4) Ensure(4, true); + available -= 4; + ioIndex += 4; + position64 += 4; + return; + case WireType.Fixed64: + if (available < 8) Ensure(8, true); + available -= 8; + ioIndex += 8; + position64 += 8; + return; + case WireType.String: + long len = (long)ReadUInt64Variant(); + if (len < 0) ThrowInvalidLength(len); + if (len <= available) + { // just jump it! + available -= (int)len; + ioIndex += (int)len; + position64 += len; + return; + } + // everything remaining in the buffer is garbage + position64 += len; // assumes success, but if it fails we're screwed anyway + len -= available; // discount anything we've got to-hand + ioIndex = available = 0; // note that we have no data in the buffer + if (isFixedLength) + { + if (len > dataRemaining64) throw EoF(this); + // else assume we're going to be OK + dataRemaining64 -= len; + } + ProtoReader.Seek(source, len, ioBuffer); + return; + case WireType.Variant: + case WireType.SignedVariant: + ReadUInt64Variant(); // and drop it + return; + case WireType.StartGroup: + int originalFieldNumber = this.fieldNumber; + depth++; // need to satisfy the sanity-checks in ReadFieldHeader + while (ReadFieldHeader() > 0) { SkipField(); } + depth--; + if (wireType == WireType.EndGroup && fieldNumber == originalFieldNumber) + { // we expect to exit in a similar state to how we entered + wireType = ProtoBuf.WireType.None; + return; + } + throw CreateWireTypeException(); + case WireType.None: // treat as explicit errorr + case WireType.EndGroup: // treat as explicit error + default: // treat as implicit error + throw CreateWireTypeException(); + } + } + + /// + /// Reads an unsigned 64-bit integer from the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + public ulong ReadUInt64() + { + switch (wireType) + { + case WireType.Variant: + return ReadUInt64Variant(); + case WireType.Fixed32: + return ReadUInt32(); + case WireType.Fixed64: + if (available < 8) Ensure(8, true); + position64 += 8; + available -= 8; + + return ((ulong)ioBuffer[ioIndex++]) + | (((ulong)ioBuffer[ioIndex++]) << 8) + | (((ulong)ioBuffer[ioIndex++]) << 16) + | (((ulong)ioBuffer[ioIndex++]) << 24) + | (((ulong)ioBuffer[ioIndex++]) << 32) + | (((ulong)ioBuffer[ioIndex++]) << 40) + | (((ulong)ioBuffer[ioIndex++]) << 48) + | (((ulong)ioBuffer[ioIndex++]) << 56); + default: + throw CreateWireTypeException(); + } 
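+            // Varint sketch: base-128, least-significant 7-bit group first, high bit set on all
+            // but the last byte. For example 300 (0b1_0010_1100) is the two bytes AC 02:
+            //   (0xAC & 0x7F) | ((0x02 & 0x7F) << 7) = 44 + 256 = 300
+            // which is what ReadUInt64Variant / the Variant branch above reassembles.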
+ } + /// + /// Reads a single-precision number from the stream; supported wire-types: Fixed32, Fixed64 + /// + public +#if !FEAT_SAFE + unsafe +#endif + float ReadSingle() + { + switch (wireType) + { + case WireType.Fixed32: + { + int value = ReadInt32(); +#if FEAT_SAFE + return BitConverter.ToSingle(BitConverter.GetBytes(value), 0); +#else + return *(float*)&value; +#endif + } + case WireType.Fixed64: + { + double value = ReadDouble(); + float f = (float)value; + if (float.IsInfinity(f) && !double.IsInfinity(value)) + { + throw AddErrorData(new OverflowException(), this); + } + return f; + } + default: + throw CreateWireTypeException(); + } + } + + /// + /// Reads a boolean value from the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + /// + public bool ReadBoolean() + { + switch (ReadUInt32()) + { + case 0: return false; + case 1: return true; + default: throw CreateException("Unexpected boolean value"); + } + } + + private static readonly byte[] EmptyBlob = new byte[0]; + /// + /// Reads a byte-sequence from the stream, appending them to an existing byte-sequence (which can be null); supported wire-types: String + /// + public static byte[] AppendBytes(byte[] value, ProtoReader reader) + { + if (reader == null) throw new ArgumentNullException(nameof(reader)); + switch (reader.wireType) + { + case WireType.String: + int len = (int)reader.ReadUInt32Variant(false); + reader.wireType = WireType.None; + if (len == 0) return value ?? EmptyBlob; + if (len < 0) reader.ThrowInvalidLength(len); + int offset; + if (value == null || value.Length == 0) + { + offset = 0; + value = new byte[len]; + } + else + { + offset = value.Length; + byte[] tmp = new byte[value.Length + len]; + Buffer.BlockCopy(value, 0, tmp, 0, value.Length); + value = tmp; + } + // value is now sized with the final length, and (if necessary) + // contains the old data up to "offset" + reader.position64 += len; // assume success + while (len > reader.available) + { + if (reader.available > 0) + { + // copy what we *do* have + Buffer.BlockCopy(reader.ioBuffer, reader.ioIndex, value, offset, reader.available); + len -= reader.available; + offset += reader.available; + reader.ioIndex = reader.available = 0; // we've drained the buffer + } + // now refill the buffer (without overflowing it) + int count = len > reader.ioBuffer.Length ? reader.ioBuffer.Length : len; + if (count > 0) reader.Ensure(count, true); + } + // at this point, we know that len <= available + if (len > 0) + { // still need data, but we have enough buffered + Buffer.BlockCopy(reader.ioBuffer, reader.ioIndex, value, offset, len); + reader.ioIndex += len; + reader.available -= len; + } + return value; + case WireType.Variant: + return new byte[0]; + default: + throw reader.CreateWireTypeException(); + } + } + + //static byte[] ReadBytes(Stream stream, int length) + //{ + // if (stream == null) throw new ArgumentNullException("stream"); + // if (length < 0) throw new ArgumentOutOfRangeException("length"); + // byte[] buffer = new byte[length]; + // int offset = 0, read; + // while (length > 0 && (read = stream.Read(buffer, offset, length)) > 0) + // { + // length -= read; + // } + // if (length > 0) throw EoF(null); + // return buffer; + //} + private static int ReadByteOrThrow(Stream source) + { + int val = source.ReadByte(); + if (val < 0) throw EoF(null); + return val; + } + + /// + /// Reads the length-prefix of a message from a stream without buffering additional data, allowing a fixed-length + /// reader to be created. 
+ /// + public static int ReadLengthPrefix(Stream source, bool expectHeader, PrefixStyle style, out int fieldNumber) + => ReadLengthPrefix(source, expectHeader, style, out fieldNumber, out int bytesRead); + + /// + /// Reads a little-endian encoded integer. An exception is thrown if the data is not all available. + /// + public static int DirectReadLittleEndianInt32(Stream source) + { + return ReadByteOrThrow(source) + | (ReadByteOrThrow(source) << 8) + | (ReadByteOrThrow(source) << 16) + | (ReadByteOrThrow(source) << 24); + } + + /// + /// Reads a big-endian encoded integer. An exception is thrown if the data is not all available. + /// + public static int DirectReadBigEndianInt32(Stream source) + { + return (ReadByteOrThrow(source) << 24) + | (ReadByteOrThrow(source) << 16) + | (ReadByteOrThrow(source) << 8) + | ReadByteOrThrow(source); + } + + /// + /// Reads a varint encoded integer. An exception is thrown if the data is not all available. + /// + public static int DirectReadVarintInt32(Stream source) + { + int bytes = TryReadUInt64Variant(source, out ulong val); + if (bytes <= 0) throw EoF(null); + return checked((int)val); + } + + /// + /// Reads a string (of a given lenth, in bytes) directly from the source into a pre-existing buffer. An exception is thrown if the data is not all available. + /// + public static void DirectReadBytes(Stream source, byte[] buffer, int offset, int count) + { + int read; + if (source == null) throw new ArgumentNullException("source"); + while (count > 0 && (read = source.Read(buffer, offset, count)) > 0) + { + count -= read; + offset += read; + } + if (count > 0) throw EoF(null); + } + + /// + /// Reads a given number of bytes directly from the source. An exception is thrown if the data is not all available. + /// + public static byte[] DirectReadBytes(Stream source, int count) + { + byte[] buffer = new byte[count]; + DirectReadBytes(source, buffer, 0, count); + return buffer; + } + + /// + /// Reads a string (of a given lenth, in bytes) directly from the source. An exception is thrown if the data is not all available. + /// + public static string DirectReadString(Stream source, int length) + { + byte[] buffer = new byte[length]; + DirectReadBytes(source, buffer, 0, length); + return Encoding.UTF8.GetString(buffer, 0, length); + } + + /// + /// Reads the length-prefix of a message from a stream without buffering additional data, allowing a fixed-length + /// reader to be created. + /// + public static int ReadLengthPrefix(Stream source, bool expectHeader, PrefixStyle style, out int fieldNumber, out int bytesRead) + { + if (style == PrefixStyle.None) + { + bytesRead = fieldNumber = 0; + return int.MaxValue; // avoid the long.maxvalue causing overflow + } + long len64 = ReadLongLengthPrefix(source, expectHeader, style, out fieldNumber, out bytesRead); + return checked((int)len64); + } + + /// + /// Reads the length-prefix of a message from a stream without buffering additional data, allowing a fixed-length + /// reader to be created. 
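+        // Prefix-style sketch for a 300-byte message (illustrative values only):
+        //   PrefixStyle.Base128          -> AC 02         (varint length)
+        //   PrefixStyle.Fixed32          -> 2C 01 00 00   (little-endian int32)
+        //   PrefixStyle.Fixed32BigEndian -> 00 00 01 2C   (big-endian int32)
+        // With expectHeader=true, Base128 first reads a field header such as 0A
+        // (field 1, wire-type String) and reports the field number to the caller.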
+ /// + public static long ReadLongLengthPrefix(Stream source, bool expectHeader, PrefixStyle style, out int fieldNumber, out int bytesRead) + { + fieldNumber = 0; + switch (style) + { + case PrefixStyle.None: + bytesRead = 0; + return long.MaxValue; + case PrefixStyle.Base128: + ulong val; + int tmpBytesRead; + bytesRead = 0; + if (expectHeader) + { + tmpBytesRead = ProtoReader.TryReadUInt64Variant(source, out val); + bytesRead += tmpBytesRead; + if (tmpBytesRead > 0) + { + if ((val & 7) != (uint)WireType.String) + { // got a header, but it isn't a string + throw new InvalidOperationException(); + } + fieldNumber = (int)(val >> 3); + tmpBytesRead = ProtoReader.TryReadUInt64Variant(source, out val); + bytesRead += tmpBytesRead; + if (bytesRead == 0) + { // got a header, but no length + throw EoF(null); + } + return (long)val; + } + else + { // no header + bytesRead = 0; + return -1; + } + } + // check for a length + tmpBytesRead = ProtoReader.TryReadUInt64Variant(source, out val); + bytesRead += tmpBytesRead; + return bytesRead < 0 ? -1 : (long)val; + + case PrefixStyle.Fixed32: + { + int b = source.ReadByte(); + if (b < 0) + { + bytesRead = 0; + return -1; + } + bytesRead = 4; + return b + | (ReadByteOrThrow(source) << 8) + | (ReadByteOrThrow(source) << 16) + | (ReadByteOrThrow(source) << 24); + } + case PrefixStyle.Fixed32BigEndian: + { + int b = source.ReadByte(); + if (b < 0) + { + bytesRead = 0; + return -1; + } + bytesRead = 4; + return (b << 24) + | (ReadByteOrThrow(source) << 16) + | (ReadByteOrThrow(source) << 8) + | ReadByteOrThrow(source); + } + default: + throw new ArgumentOutOfRangeException("style"); + } + } + + /// The number of bytes consumed; 0 if no data available + private static int TryReadUInt64Variant(Stream source, out ulong value) + { + value = 0; + int b = source.ReadByte(); + if (b < 0) { return 0; } + value = (uint)b; + if ((value & 0x80) == 0) { return 1; } + value &= 0x7F; + int bytesRead = 1, shift = 7; + while (bytesRead < 9) + { + b = source.ReadByte(); + if (b < 0) throw EoF(null); + value |= ((ulong)b & 0x7F) << shift; + shift += 7; + bytesRead++; + + if ((b & 0x80) == 0) return bytesRead; + } + b = source.ReadByte(); + if (b < 0) throw EoF(null); + if ((b & 1) == 0) // only use 1 bit from the last byte + { + value |= ((ulong)b & 0x7F) << shift; + return ++bytesRead; + } + throw new OverflowException(); + } + + internal static void Seek(Stream source, long count, byte[] buffer) + { + if (source.CanSeek) + { + source.Seek(count, SeekOrigin.Current); + count = 0; + } + else if (buffer != null) + { + int bytesRead; + while (count > buffer.Length && (bytesRead = source.Read(buffer, 0, buffer.Length)) > 0) + { + count -= bytesRead; + } + while (count > 0 && (bytesRead = source.Read(buffer, 0, (int)count)) > 0) + { + count -= bytesRead; + } + } + else // borrow a buffer + { + buffer = BufferPool.GetBuffer(); + try + { + int bytesRead; + while (count > buffer.Length && (bytesRead = source.Read(buffer, 0, buffer.Length)) > 0) + { + count -= bytesRead; + } + while (count > 0 && (bytesRead = source.Read(buffer, 0, (int)count)) > 0) + { + count -= bytesRead; + } + } + finally + { + BufferPool.ReleaseBufferToPool(ref buffer); + } + } + if (count > 0) throw EoF(null); + } + internal static Exception AddErrorData(Exception exception, ProtoReader source) + { +#if !CF && !PORTABLE + if (exception != null && source != null && !exception.Data.Contains("protoSource")) + { + exception.Data.Add("protoSource", string.Format("tag={0}; wire-type={1}; offset={2}; depth={3}", + 
source.fieldNumber, source.wireType, source.position64, source.depth)); + } +#endif + return exception; + } + + private static Exception EoF(ProtoReader source) + { + return AddErrorData(new EndOfStreamException(), source); + } + + /// + /// Copies the current field into the instance as extension data + /// + public void AppendExtensionData(IExtensible instance) + { + if (instance == null) throw new ArgumentNullException(nameof(instance)); + IExtension extn = instance.GetExtensionObject(true); + bool commit = false; + // unusually we *don't* want "using" here; the "finally" does that, with + // the extension object being responsible for disposal etc + Stream dest = extn.BeginAppend(); + try + { + //TODO: replace this with stream-based, buffered raw copying + using (ProtoWriter writer = ProtoWriter.Create(dest, model, null)) + { + AppendExtensionField(writer); + writer.Close(); + } + commit = true; + } + finally { extn.EndAppend(dest, commit); } + } + + private void AppendExtensionField(ProtoWriter writer) + { + //TODO: replace this with stream-based, buffered raw copying + ProtoWriter.WriteFieldHeader(fieldNumber, wireType, writer); + switch (wireType) + { + case WireType.Fixed32: + ProtoWriter.WriteInt32(ReadInt32(), writer); + return; + case WireType.Variant: + case WireType.SignedVariant: + case WireType.Fixed64: + ProtoWriter.WriteInt64(ReadInt64(), writer); + return; + case WireType.String: + ProtoWriter.WriteBytes(AppendBytes(null, this), writer); + return; + case WireType.StartGroup: + SubItemToken readerToken = StartSubItem(this), + writerToken = ProtoWriter.StartSubItem(null, writer); + while (ReadFieldHeader() > 0) { AppendExtensionField(writer); } + EndSubItem(readerToken, this); + ProtoWriter.EndSubItem(writerToken, writer); + return; + case WireType.None: // treat as explicit errorr + case WireType.EndGroup: // treat as explicit error + default: // treat as implicit error + throw CreateWireTypeException(); + } + } + + /// + /// Indicates whether the reader still has data remaining in the current sub-item, + /// additionally setting the wire-type for the next field if there is more data. + /// This is used when decoding packed data. 
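+        // Packed-encoding sketch: a packed repeated field is one String-framed field whose
+        // payload is the elements back to back, with no per-element tags. For example,
+        // repeated int32 field 4 holding [3, 270, 86942] is encoded as
+        //   22 06 03 8E 02 9E A7 05   (tag 0x22 = (4<<3)|2, length 6, then three varints).
+        // HasSubValue lets the caller keep re-applying the hinted wire-type per element
+        // until blockEnd64 is reached.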
+ /// + public static bool HasSubValue(ProtoBuf.WireType wireType, ProtoReader source) + { + if (source == null) throw new ArgumentNullException("source"); + // check for virtual end of stream + if (source.blockEnd64 <= source.position64 || wireType == WireType.EndGroup) { return false; } + source.wireType = wireType; + return true; + } + + internal int GetTypeKey(ref Type type) + { + return model.GetKey(ref type); + } + + internal NetObjectCache NetCache => netCache; + + internal Type DeserializeType(string value) + { + return TypeModel.DeserializeType(model, value); + } + + internal void SetRootObject(object value) + { + netCache.SetKeyedObject(NetObjectCache.Root, value); + trapCount--; + } + + /// + /// Utility method, not intended for public use; this helps maintain the root object is complex scenarios + /// + public static void NoteObject(object value, ProtoReader reader) + { + if (reader == null) throw new ArgumentNullException("reader"); + if (reader.trapCount != 0) + { + reader.netCache.RegisterTrappedObject(value); + reader.trapCount--; + } + } + + /// + /// Reads a Type from the stream, using the model's DynamicTypeFormatting if appropriate; supported wire-types: String + /// + public Type ReadType() + { + return TypeModel.DeserializeType(model, ReadString()); + } + + internal void TrapNextObject(int newObjectKey) + { + trapCount++; + netCache.SetKeyedObject(newObjectKey, null); // use null as a temp + } + + internal void CheckFullyConsumed() + { + if (isFixedLength) + { + if (dataRemaining64 != 0) throw new ProtoException("Incorrect number of bytes consumed"); + } + else + { + if (available != 0) throw new ProtoException("Unconsumed data left in the buffer; this suggests corrupt input"); + } + } + + /// + /// Merge two objects using the details from the current reader; this is used to change the type + /// of objects when an inheritance relationship is discovered later than usual during deserilazation. 
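+        // Usage sketch (hypothetical types): if data being read as a base type turns out to
+        // describe a derived type, the state gathered so far can be transferred onto a new
+        // instance of the derived type:
+        //   Animal partial = ...;                                  // fields read so far
+        //   Dog dog = (Dog)ProtoReader.Merge(reader, partial, new Dog());
+        // Internally the method round-trips "from" through a MemoryStream using the reader's model.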
+ /// + public static object Merge(ProtoReader parent, object from, object to) + { + if (parent == null) throw new ArgumentNullException("parent"); + TypeModel model = parent.Model; + SerializationContext ctx = parent.Context; + if (model == null) throw new InvalidOperationException("Types cannot be merged unless a type-model has been specified"); + using (var ms = new MemoryStream()) + { + model.Serialize(ms, from, ctx); + ms.Position = 0; + return model.Deserialize(ms, to, null); + } + } + + #region RECYCLER + + internal static ProtoReader Create(Stream source, TypeModel model, SerializationContext context, int len) + => Create(source, model, context, (long)len); + /// + /// Creates a new reader against a stream + /// + /// The source stream + /// The model to use for serialization; this can be null, but this will impair the ability to deserialize sub-objects + /// Additional context about this serialization operation + /// The number of bytes to read, or -1 to read until the end of the stream + public static ProtoReader Create(Stream source, TypeModel model, SerializationContext context = null, long length = TO_EOF) + { + ProtoReader reader = GetRecycled(); + if (reader == null) + { +#pragma warning disable CS0618 + return new ProtoReader(source, model, context, length); +#pragma warning restore CS0618 + } + Init(reader, source, model, context, length); + return reader; + } + +#if !PLAT_NO_THREADSTATIC + [ThreadStatic] + private static ProtoReader lastReader; + + private static ProtoReader GetRecycled() + { + ProtoReader tmp = lastReader; + lastReader = null; + return tmp; + } + internal static void Recycle(ProtoReader reader) + { + if (reader != null) + { + reader.Dispose(); + lastReader = reader; + } + } +#elif !PLAT_NO_INTERLOCKED + private static object lastReader; + private static ProtoReader GetRecycled() + { + return (ProtoReader)System.Threading.Interlocked.Exchange(ref lastReader, null); + } + internal static void Recycle(ProtoReader reader) + { + if(reader != null) + { + reader.Dispose(); + System.Threading.Interlocked.Exchange(ref lastReader, reader); + } + } +#else + private static readonly object recycleLock = new object(); + private static ProtoReader lastReader; + private static ProtoReader GetRecycled() + { + lock(recycleLock) + { + ProtoReader tmp = lastReader; + lastReader = null; + return tmp; + } + } + internal static void Recycle(ProtoReader reader) + { + if(reader != null) + { + reader.Dispose(); + lock(recycleLock) + { + lastReader = reader; + } + } + } +#endif + + #endregion + } +} diff --git a/Runtime/Protobuf-net/ProtoReader.cs.meta b/Runtime/Protobuf-net/ProtoReader.cs.meta new file mode 100644 index 0000000..0826a16 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoReader.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: bd9c8ee218e18b14b9058926b6bbc8fe +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ProtoWriter.cs b/Runtime/Protobuf-net/ProtoWriter.cs new file mode 100644 index 0000000..23fa42d --- /dev/null +++ b/Runtime/Protobuf-net/ProtoWriter.cs @@ -0,0 +1,1003 @@ +using System; +using System.IO; +using System.Text; +using ProtoBuf.Meta; + +namespace ProtoBuf +{ + /// + /// Represents an output stream for writing protobuf data. + /// + /// Why is the API backwards (static methods with writer arguments)? 
+ /// See: http://marcgravell.blogspot.com/2010/03/last-will-be-first-and-first-will-be.html + /// + public sealed class ProtoWriter : IDisposable + { + private Stream dest; + TypeModel model; + /// + /// Write an encapsulated sub-object, using the supplied unique key (reprasenting a type). + /// + /// The object to write. + /// The key that uniquely identifies the type within the model. + /// The destination. + public static void WriteObject(object value, int key, ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException("writer"); + if (writer.model == null) + { + throw new InvalidOperationException("Cannot serialize sub-objects unless a model is provided"); + } + + SubItemToken token = StartSubItem(value, writer); + if (key >= 0) + { + writer.model.Serialize(key, value, writer); + } + else if (writer.model != null && writer.model.TrySerializeAuxiliaryType(writer, value.GetType(), DataFormat.Default, Serializer.ListItemTag, value, false, null)) + { + // all ok + } + else + { + TypeModel.ThrowUnexpectedType(value.GetType()); + } + + EndSubItem(token, writer); + } + /// + /// Write an encapsulated sub-object, using the supplied unique key (reprasenting a type) - but the + /// caller is asserting that this relationship is non-recursive; no recursion check will be + /// performed. + /// + /// The object to write. + /// The key that uniquely identifies the type within the model. + /// The destination. + public static void WriteRecursionSafeObject(object value, int key, ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException(nameof(writer)); + if (writer.model == null) + { + throw new InvalidOperationException("Cannot serialize sub-objects unless a model is provided"); + } + SubItemToken token = StartSubItem(null, writer); + writer.model.Serialize(key, value, writer); + EndSubItem(token, writer); + } + + internal static void WriteObject(object value, int key, ProtoWriter writer, PrefixStyle style, int fieldNumber) + { + if (writer.model == null) + { + throw new InvalidOperationException("Cannot serialize sub-objects unless a model is provided"); + } + if (writer.wireType != WireType.None) throw ProtoWriter.CreateException(writer); + + switch (style) + { + case PrefixStyle.Base128: + writer.wireType = WireType.String; + writer.fieldNumber = fieldNumber; + if (fieldNumber > 0) WriteHeaderCore(fieldNumber, WireType.String, writer); + break; + case PrefixStyle.Fixed32: + case PrefixStyle.Fixed32BigEndian: + writer.fieldNumber = 0; + writer.wireType = WireType.Fixed32; + break; + default: + throw new ArgumentOutOfRangeException("style"); + } + SubItemToken token = StartSubItem(value, writer, true); + if (key < 0) + { + if (!writer.model.TrySerializeAuxiliaryType(writer, value.GetType(), DataFormat.Default, Serializer.ListItemTag, value, false, null)) + { + TypeModel.ThrowUnexpectedType(value.GetType()); + } + } + else + { + writer.model.Serialize(key, value, writer); + } + EndSubItem(token, writer, style); + } + + internal int GetTypeKey(ref Type type) + { + return model.GetKey(ref type); + } + + private readonly NetObjectCache netCache = new NetObjectCache(); + internal NetObjectCache NetCache => netCache; + + private int fieldNumber, flushLock; + WireType wireType; + internal WireType WireType { get { return wireType; } } + /// + /// Writes a field-header, indicating the format of the next data we plan to write. 
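+        // Tag layout sketch: the header is a single varint, (fieldNumber << 3) | wireType.
+        //   field 1,  String  -> (1 << 3) | 2  = 0x0A           (one byte)
+        //   field 16, Variant -> (16 << 3) | 0 = 128 -> 80 01   (two varint bytes)
+        // WriteHeaderCore below emits exactly this via WriteUInt32Variant.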
+ /// + public static void WriteFieldHeader(int fieldNumber, WireType wireType, ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException("writer"); + if (writer.wireType != WireType.None) throw new InvalidOperationException("Cannot write a " + wireType.ToString() + + " header until the " + writer.wireType.ToString() + " data has been written"); + if (fieldNumber < 0) throw new ArgumentOutOfRangeException("fieldNumber"); +#if DEBUG + switch (wireType) + { // validate requested header-type + case WireType.Fixed32: + case WireType.Fixed64: + case WireType.String: + case WireType.StartGroup: + case WireType.SignedVariant: + case WireType.Variant: + break; // fine + case WireType.None: + case WireType.EndGroup: + default: + throw new ArgumentException("Invalid wire-type: " + wireType.ToString(), "wireType"); + } +#endif + if (writer.packedFieldNumber == 0) + { + writer.fieldNumber = fieldNumber; + writer.wireType = wireType; + WriteHeaderCore(fieldNumber, wireType, writer); + } + else if (writer.packedFieldNumber == fieldNumber) + { // we'll set things up, but note we *don't* actually write the header here + switch (wireType) + { + case WireType.Fixed32: + case WireType.Fixed64: + case WireType.Variant: + case WireType.SignedVariant: + break; // fine + default: + throw new InvalidOperationException("Wire-type cannot be encoded as packed: " + wireType.ToString()); + } + writer.fieldNumber = fieldNumber; + writer.wireType = wireType; + } + else + { + throw new InvalidOperationException("Field mismatch during packed encoding; expected " + writer.packedFieldNumber.ToString() + " but received " + fieldNumber.ToString()); + } + } + internal static void WriteHeaderCore(int fieldNumber, WireType wireType, ProtoWriter writer) + { + uint header = (((uint)fieldNumber) << 3) + | (((uint)wireType) & 7); + WriteUInt32Variant(header, writer); + } + + /// + /// Writes a byte-array to the stream; supported wire-types: String + /// + public static void WriteBytes(byte[] data, ProtoWriter writer) + { + if (data == null) throw new ArgumentNullException(nameof(data)); + ProtoWriter.WriteBytes(data, 0, data.Length, writer); + } + /// + /// Writes a byte-array to the stream; supported wire-types: String + /// + public static void WriteBytes(byte[] data, int offset, int length, ProtoWriter writer) + { + if (data == null) throw new ArgumentNullException(nameof(data)); + if (writer == null) throw new ArgumentNullException(nameof(writer)); + switch (writer.wireType) + { + case WireType.Fixed32: + if (length != 4) throw new ArgumentException(nameof(length)); + goto CopyFixedLength; // ugly but effective + case WireType.Fixed64: + if (length != 8) throw new ArgumentException(nameof(length)); + goto CopyFixedLength; // ugly but effective + case WireType.String: + WriteUInt32Variant((uint)length, writer); + writer.wireType = WireType.None; + if (length == 0) return; + if (writer.flushLock != 0 || length <= writer.ioBuffer.Length) // write to the buffer + { + goto CopyFixedLength; // ugly but effective + } + // writing data that is bigger than the buffer (and the buffer + // isn't currently locked due to a sub-object needing the size backfilled) + Flush(writer); // commit any existing data from the buffer + // now just write directly to the underlying stream + writer.dest.Write(data, offset, length); + writer.position64 += length; // since we've flushed offset etc is 0, and remains + // zero since we're writing directly to the stream + return; + } + throw CreateException(writer); + CopyFixedLength: // no 
point duplicating this lots of times, and don't really want another stackframe + DemandSpace(length, writer); + Buffer.BlockCopy(data, offset, writer.ioBuffer, writer.ioIndex, length); + IncrementedAndReset(length, writer); + } + private static void CopyRawFromStream(Stream source, ProtoWriter writer) + { + byte[] buffer = writer.ioBuffer; + int space = buffer.Length - writer.ioIndex, bytesRead = 1; // 1 here to spoof case where already full + + // try filling the buffer first + while (space > 0 && (bytesRead = source.Read(buffer, writer.ioIndex, space)) > 0) + { + writer.ioIndex += bytesRead; + writer.position64 += bytesRead; + space -= bytesRead; + } + if (bytesRead <= 0) return; // all done using just the buffer; stream exhausted + + // at this point the stream still has data, but buffer is full; + if (writer.flushLock == 0) + { + // flush the buffer and write to the underlying stream instead + Flush(writer); + while ((bytesRead = source.Read(buffer, 0, buffer.Length)) > 0) + { + writer.dest.Write(buffer, 0, bytesRead); + writer.position64 += bytesRead; + } + } + else + { + do + { + // need more space; resize (double) as necessary, + // requesting a reasonable minimum chunk each time + // (128 is the minimum; there may actually be much + // more space than this in the buffer) + DemandSpace(128, writer); + if ((bytesRead = source.Read(writer.ioBuffer, writer.ioIndex, + writer.ioBuffer.Length - writer.ioIndex)) <= 0) break; + writer.position64 += bytesRead; + writer.ioIndex += bytesRead; + } while (true); + } + + } + private static void IncrementedAndReset(int length, ProtoWriter writer) + { + Helpers.DebugAssert(length >= 0); + writer.ioIndex += length; + writer.position64 += length; + writer.wireType = WireType.None; + } + int depth = 0; + const int RecursionCheckDepth = 25; + /// + /// Indicates the start of a nested record. + /// + /// The instance to write. + /// The destination. + /// A token representing the state of the stream; this token is given to EndSubItem. + public static SubItemToken StartSubItem(object instance, ProtoWriter writer) + { + return StartSubItem(instance, writer, false); + } + + MutableList recursionStack; + private void CheckRecursionStackAndPush(object instance) + { + int hitLevel; + if (recursionStack == null) { recursionStack = new MutableList(); } + else if (instance != null && (hitLevel = recursionStack.IndexOfReference(instance)) >= 0) + { +#if DEBUG + Helpers.DebugWriteLine("Stack:"); + foreach (object obj in recursionStack) + { + Helpers.DebugWriteLine(obj == null ? "" : obj.ToString()); + } + Helpers.DebugWriteLine(instance == null ? 
"" : instance.ToString()); +#endif + throw new ProtoException("Possible recursion detected (offset: " + (recursionStack.Count - hitLevel).ToString() + " level(s)): " + instance.ToString()); + } + recursionStack.Add(instance); + } + private void PopRecursionStack() { recursionStack.RemoveLast(); } + + private static SubItemToken StartSubItem(object instance, ProtoWriter writer, bool allowFixed) + { + if (writer == null) throw new ArgumentNullException("writer"); + if (++writer.depth > RecursionCheckDepth) + { + writer.CheckRecursionStackAndPush(instance); + } + if (writer.packedFieldNumber != 0) throw new InvalidOperationException("Cannot begin a sub-item while performing packed encoding"); + switch (writer.wireType) + { + case WireType.StartGroup: + writer.wireType = WireType.None; + return new SubItemToken((long)(-writer.fieldNumber)); + case WireType.String: +#if DEBUG + if (writer.model != null && writer.model.ForwardsOnly) + { + throw new ProtoException("Should not be buffering data: " + instance ?? "(null)"); + } +#endif + writer.wireType = WireType.None; + DemandSpace(32, writer); // make some space in anticipation... + writer.flushLock++; + writer.position64++; + return new SubItemToken((long)(writer.ioIndex++)); // leave 1 space (optimistic) for length + case WireType.Fixed32: + { + if (!allowFixed) throw CreateException(writer); + DemandSpace(32, writer); // make some space in anticipation... + writer.flushLock++; + SubItemToken token = new SubItemToken((long)writer.ioIndex); + ProtoWriter.IncrementedAndReset(4, writer); // leave 4 space (rigid) for length + return token; + } + default: + throw CreateException(writer); + } + } + + /// + /// Indicates the end of a nested record. + /// + /// The token obtained from StartubItem. + /// The destination. 
+ public static void EndSubItem(SubItemToken token, ProtoWriter writer) + { + EndSubItem(token, writer, PrefixStyle.Base128); + } + private static void EndSubItem(SubItemToken token, ProtoWriter writer, PrefixStyle style) + { + if (writer == null) throw new ArgumentNullException("writer"); + if (writer.wireType != WireType.None) { throw CreateException(writer); } + int value = (int)token.value64; + if (writer.depth <= 0) throw CreateException(writer); + if (writer.depth-- > RecursionCheckDepth) + { + writer.PopRecursionStack(); + } + writer.packedFieldNumber = 0; // ending the sub-item always wipes packed encoding + if (value < 0) + { // group - very simple append + WriteHeaderCore(-value, WireType.EndGroup, writer); + writer.wireType = WireType.None; + return; + } + + // so we're backfilling the length into an existing sequence + int len; + switch (style) + { + case PrefixStyle.Fixed32: + len = (int)((writer.ioIndex - value) - 4); + ProtoWriter.WriteInt32ToBuffer(len, writer.ioBuffer, value); + break; + case PrefixStyle.Fixed32BigEndian: + len = (int)((writer.ioIndex - value) - 4); + byte[] buffer = writer.ioBuffer; + ProtoWriter.WriteInt32ToBuffer(len, buffer, value); + // and swap the byte order + byte b = buffer[value]; + buffer[value] = buffer[value + 3]; + buffer[value + 3] = b; + b = buffer[value + 1]; + buffer[value + 1] = buffer[value + 2]; + buffer[value + 2] = b; + break; + case PrefixStyle.Base128: + // string - complicated because we only reserved one byte; + // if the prefix turns out to need more than this then + // we need to shuffle the existing data + len = (int)((writer.ioIndex - value) - 1); + int offset = 0; + uint tmp = (uint)len; + while ((tmp >>= 7) != 0) offset++; + if (offset == 0) + { + writer.ioBuffer[value] = (byte)(len & 0x7F); + } + else + { + DemandSpace(offset, writer); + byte[] blob = writer.ioBuffer; + Buffer.BlockCopy(blob, value + 1, blob, value + 1 + offset, len); + tmp = (uint)len; + do + { + blob[value++] = (byte)((tmp & 0x7F) | 0x80); + } while ((tmp >>= 7) != 0); + blob[value - 1] = (byte)(blob[value - 1] & ~0x80); + writer.position64 += offset; + writer.ioIndex += offset; + } + break; + default: + throw new ArgumentOutOfRangeException("style"); + } + // and this object is no longer a blockage - also flush if sensible + const int ADVISORY_FLUSH_SIZE = 1024; + if (--writer.flushLock == 0 && writer.ioIndex >= ADVISORY_FLUSH_SIZE) + { + ProtoWriter.Flush(writer); + } + + } + + /// + /// Creates a new writer against a stream + /// + /// The destination stream + /// The model to use for serialization; this can be null, but this will impair the ability to serialize sub-objects + /// Additional context about this serialization operation + public static ProtoWriter Create(Stream dest, TypeModel model, SerializationContext context = null) +#pragma warning disable CS0618 + => new ProtoWriter(dest, model, context); +#pragma warning restore CS0618 + + /// + /// Creates a new writer against a stream + /// + /// The destination stream + /// The model to use for serialization; this can be null, but this will impair the ability to serialize sub-objects + /// Additional context about this serialization operation + [Obsolete("Please use ProtoWriter.Create; this API may be removed in a future version", error: false)] + public ProtoWriter(Stream dest, TypeModel model, SerializationContext context) + { + if (dest == null) throw new ArgumentNullException("dest"); + if (!dest.CanWrite) throw new ArgumentException("Cannot write to stream", "dest"); + //if (model == null) 
throw new ArgumentNullException("model"); + this.dest = dest; + this.ioBuffer = BufferPool.GetBuffer(); + this.model = model; + this.wireType = WireType.None; + if (context == null) { context = SerializationContext.Default; } + else { context.Freeze(); } + this.context = context; + + } + + private readonly SerializationContext context; + /// + /// Addition information about this serialization operation. + /// + public SerializationContext Context => context; + + void IDisposable.Dispose() + { + Dispose(); + } + + private void Dispose() + { // importantly, this does **not** own the stream, and does not dispose it + if (dest != null) + { + Flush(this); + dest = null; + } + model = null; + BufferPool.ReleaseBufferToPool(ref ioBuffer); + } + + private byte[] ioBuffer; + private int ioIndex; + // note that this is used by some of the unit tests and should not be removed + internal static long GetLongPosition(ProtoWriter writer) { return writer.position64; } + internal static int GetPosition(ProtoWriter writer) { return checked((int)writer.position64); } + private long position64; + private static void DemandSpace(int required, ProtoWriter writer) + { + // check for enough space + if ((writer.ioBuffer.Length - writer.ioIndex) < required) + { + TryFlushOrResize(required, writer); + } + } + + private static void TryFlushOrResize(int required, ProtoWriter writer) + { + if (writer.flushLock == 0) + { + Flush(writer); // try emptying the buffer + if ((writer.ioBuffer.Length - writer.ioIndex) >= required) return; + } + + // either can't empty the buffer, or that didn't help; need more space + BufferPool.ResizeAndFlushLeft(ref writer.ioBuffer, required + writer.ioIndex, 0, writer.ioIndex); + } + + /// + /// Flushes data to the underlying stream, and releases any resources. The underlying stream is *not* disposed + /// by this operation. + /// + public void Close() + { + if (depth != 0 || flushLock != 0) throw new InvalidOperationException("Unable to close stream in an incomplete state"); + Dispose(); + } + + internal void CheckDepthFlushlock() + { + if (depth != 0 || flushLock != 0) throw new InvalidOperationException("The writer is in an incomplete state"); + } + + /// + /// Get the TypeModel associated with this writer + /// + public TypeModel Model => model; + + /// + /// Writes any buffered data (if possible) to the underlying stream. + /// + /// The writer to flush + /// It is not always possible to fully flush, since some sequences + /// may require values to be back-filled into the byte-stream. 
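+        // ZigZag sketch (see the Zig helpers further down, mirrored by Zag on the reader side):
+        // signed values are folded so small magnitudes stay small on the wire:
+        //   0 -> 0,  -1 -> 1,  1 -> 2,  -2 -> 3,  2 -> 4,  int.MinValue -> 4294967295
+        // i.e. Zig(n) = (n << 1) ^ (n >> 31) for 32 bits; the SignedVariant cases then write
+        // the result as an ordinary unsigned varint.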
+ internal static void Flush(ProtoWriter writer) + { + if (writer.flushLock == 0 && writer.ioIndex != 0) + { + writer.dest.Write(writer.ioBuffer, 0, writer.ioIndex); + writer.ioIndex = 0; + } + } + + /// + /// Writes an unsigned 32-bit integer to the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + private static void WriteUInt32Variant(uint value, ProtoWriter writer) + { + DemandSpace(5, writer); + int count = 0; + do + { + writer.ioBuffer[writer.ioIndex++] = (byte)((value & 0x7F) | 0x80); + count++; + } while ((value >>= 7) != 0); + writer.ioBuffer[writer.ioIndex - 1] &= 0x7F; + writer.position64 += count; + } + +#if COREFX + static readonly Encoding encoding = Encoding.UTF8; +#else + static readonly UTF8Encoding encoding = new UTF8Encoding(); +#endif + + internal static uint Zig(int value) + { + return (uint)((value << 1) ^ (value >> 31)); + } + + internal static ulong Zig(long value) + { + return (ulong)((value << 1) ^ (value >> 63)); + } + + private static void WriteUInt64Variant(ulong value, ProtoWriter writer) + { + DemandSpace(10, writer); + int count = 0; + do + { + writer.ioBuffer[writer.ioIndex++] = (byte)((value & 0x7F) | 0x80); + count++; + } while ((value >>= 7) != 0); + writer.ioBuffer[writer.ioIndex - 1] &= 0x7F; + writer.position64 += count; + } + + /// + /// Writes a string to the stream; supported wire-types: String + /// + public static void WriteString(string value, ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException("writer"); + if (writer.wireType != WireType.String) throw CreateException(writer); + if (value == null) throw new ArgumentNullException("value"); // written header; now what? + int len = value.Length; + if (len == 0) + { + WriteUInt32Variant(0, writer); + writer.wireType = WireType.None; + return; // just a header + } + int predicted = encoding.GetByteCount(value); + WriteUInt32Variant((uint)predicted, writer); + DemandSpace(predicted, writer); + int actual = encoding.GetBytes(value, 0, value.Length, writer.ioBuffer, writer.ioIndex); + Helpers.DebugAssert(predicted == actual); + IncrementedAndReset(actual, writer); + } + + /// + /// Writes an unsigned 64-bit integer to the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + public static void WriteUInt64(ulong value, ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException(nameof(writer)); + switch (writer.wireType) + { + case WireType.Fixed64: + ProtoWriter.WriteInt64((long)value, writer); + return; + case WireType.Variant: + WriteUInt64Variant(value, writer); + writer.wireType = WireType.None; + return; + case WireType.Fixed32: + checked { ProtoWriter.WriteUInt32((uint)value, writer); } + return; + default: + throw CreateException(writer); + } + } + + /// + /// Writes a signed 64-bit integer to the stream; supported wire-types: Variant, Fixed32, Fixed64, SignedVariant + /// + public static void WriteInt64(long value, ProtoWriter writer) + { + byte[] buffer; + int index; + if (writer == null) throw new ArgumentNullException(nameof(writer)); + switch (writer.wireType) + { + case WireType.Fixed64: + DemandSpace(8, writer); + buffer = writer.ioBuffer; + index = writer.ioIndex; + +#if NETCOREAPP2_1 + System.Buffers.Binary.BinaryPrimitives.WriteInt64LittleEndian(buffer.AsSpan(index, 8), value); +#else + buffer[index] = (byte)value; + buffer[index + 1] = (byte)(value >> 8); + buffer[index + 2] = (byte)(value >> 16); + buffer[index + 3] = (byte)(value >> 24); + buffer[index + 4] = (byte)(value >> 32); + buffer[index + 5] = (byte)(value >> 
40); + buffer[index + 6] = (byte)(value >> 48); + buffer[index + 7] = (byte)(value >> 56); +#endif + IncrementedAndReset(8, writer); + return; + case WireType.SignedVariant: + WriteUInt64Variant(Zig(value), writer); + writer.wireType = WireType.None; + return; + case WireType.Variant: + if (value >= 0) + { + WriteUInt64Variant((ulong)value, writer); + writer.wireType = WireType.None; + } + else + { + DemandSpace(10, writer); + buffer = writer.ioBuffer; + index = writer.ioIndex; + buffer[index] = (byte)(value | 0x80); + buffer[index + 1] = (byte)((int)(value >> 7) | 0x80); + buffer[index + 2] = (byte)((int)(value >> 14) | 0x80); + buffer[index + 3] = (byte)((int)(value >> 21) | 0x80); + buffer[index + 4] = (byte)((int)(value >> 28) | 0x80); + buffer[index + 5] = (byte)((int)(value >> 35) | 0x80); + buffer[index + 6] = (byte)((int)(value >> 42) | 0x80); + buffer[index + 7] = (byte)((int)(value >> 49) | 0x80); + buffer[index + 8] = (byte)((int)(value >> 56) | 0x80); + buffer[index + 9] = 0x01; // sign bit + IncrementedAndReset(10, writer); + } + return; + case WireType.Fixed32: + checked { WriteInt32((int)value, writer); } + return; + default: + throw CreateException(writer); + } + } + + /// + /// Writes an unsigned 16-bit integer to the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + public static void WriteUInt32(uint value, ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException("writer"); + switch (writer.wireType) + { + case WireType.Fixed32: + ProtoWriter.WriteInt32((int)value, writer); + return; + case WireType.Fixed64: + ProtoWriter.WriteInt64((int)value, writer); + return; + case WireType.Variant: + WriteUInt32Variant(value, writer); + writer.wireType = WireType.None; + return; + default: + throw CreateException(writer); + } + } + + /// + /// Writes a signed 16-bit integer to the stream; supported wire-types: Variant, Fixed32, Fixed64, SignedVariant + /// + public static void WriteInt16(short value, ProtoWriter writer) + { + ProtoWriter.WriteInt32(value, writer); + } + + /// + /// Writes an unsigned 16-bit integer to the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + public static void WriteUInt16(ushort value, ProtoWriter writer) + { + ProtoWriter.WriteUInt32(value, writer); + } + + /// + /// Writes an unsigned 8-bit integer to the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + public static void WriteByte(byte value, ProtoWriter writer) + { + ProtoWriter.WriteUInt32(value, writer); + } + /// + /// Writes a signed 8-bit integer to the stream; supported wire-types: Variant, Fixed32, Fixed64, SignedVariant + /// + public static void WriteSByte(sbyte value, ProtoWriter writer) + { + ProtoWriter.WriteInt32(value, writer); + } + + private static void WriteInt32ToBuffer(int value, byte[] buffer, int index) + { +#if NETCOREAPP2_1 + System.Buffers.Binary.BinaryPrimitives.WriteInt32LittleEndian(buffer.AsSpan(index, 4), value); +#else + buffer[index] = (byte)value; + buffer[index + 1] = (byte)(value >> 8); + buffer[index + 2] = (byte)(value >> 16); + buffer[index + 3] = (byte)(value >> 24); +#endif + } + + /// + /// Writes a signed 32-bit integer to the stream; supported wire-types: Variant, Fixed32, Fixed64, SignedVariant + /// + public static void WriteInt32(int value, ProtoWriter writer) + { + byte[] buffer; + int index; + if (writer == null) throw new ArgumentNullException(nameof(writer)); + switch (writer.wireType) + { + case WireType.Fixed32: + DemandSpace(4, writer); + WriteInt32ToBuffer(value, writer.ioBuffer, 
writer.ioIndex); + IncrementedAndReset(4, writer); + return; + case WireType.Fixed64: + DemandSpace(8, writer); + buffer = writer.ioBuffer; + index = writer.ioIndex; + buffer[index] = (byte)value; + buffer[index + 1] = (byte)(value >> 8); + buffer[index + 2] = (byte)(value >> 16); + buffer[index + 3] = (byte)(value >> 24); + buffer[index + 4] = buffer[index + 5] = + buffer[index + 6] = buffer[index + 7] = 0; + IncrementedAndReset(8, writer); + return; + case WireType.SignedVariant: + WriteUInt32Variant(Zig(value), writer); + writer.wireType = WireType.None; + return; + case WireType.Variant: + if (value >= 0) + { + WriteUInt32Variant((uint)value, writer); + writer.wireType = WireType.None; + } + else + { + DemandSpace(10, writer); + buffer = writer.ioBuffer; + index = writer.ioIndex; + buffer[index] = (byte)(value | 0x80); + buffer[index + 1] = (byte)((value >> 7) | 0x80); + buffer[index + 2] = (byte)((value >> 14) | 0x80); + buffer[index + 3] = (byte)((value >> 21) | 0x80); + buffer[index + 4] = (byte)((value >> 28) | 0x80); + buffer[index + 5] = buffer[index + 6] = + buffer[index + 7] = buffer[index + 8] = (byte)0xFF; + buffer[index + 9] = (byte)0x01; + IncrementedAndReset(10, writer); + } + return; + default: + throw CreateException(writer); + } + } + + /// + /// Writes a double-precision number to the stream; supported wire-types: Fixed32, Fixed64 + /// + public +#if !FEAT_SAFE + unsafe +#endif + + static void WriteDouble(double value, ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException("writer"); + switch (writer.wireType) + { + case WireType.Fixed32: + float f = (float)value; + if (float.IsInfinity(f) && !double.IsInfinity(value)) + { + throw new OverflowException(); + } + ProtoWriter.WriteSingle(f, writer); + return; + case WireType.Fixed64: +#if FEAT_SAFE + ProtoWriter.WriteInt64(BitConverter.ToInt64(BitConverter.GetBytes(value), 0), writer); +#else + ProtoWriter.WriteInt64(*(long*)&value, writer); +#endif + return; + default: + throw CreateException(writer); + } + } + /// + /// Writes a single-precision number to the stream; supported wire-types: Fixed32, Fixed64 + /// + public +#if !FEAT_SAFE + unsafe +#endif + static void WriteSingle(float value, ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException("writer"); + switch (writer.wireType) + { + case WireType.Fixed32: +#if FEAT_SAFE + ProtoWriter.WriteInt32(BitConverter.ToInt32(BitConverter.GetBytes(value), 0), writer); +#else + ProtoWriter.WriteInt32(*(int*)&value, writer); +#endif + return; + case WireType.Fixed64: + ProtoWriter.WriteDouble((double)value, writer); + return; + default: + throw CreateException(writer); + } + } + + /// + /// Throws an exception indicating that the given enum cannot be mapped to a serialized value. + /// + public static void ThrowEnumException(ProtoWriter writer, object enumValue) + { + if (writer == null) throw new ArgumentNullException("writer"); + string rhs = enumValue == null ? "" : (enumValue.GetType().FullName + "." 
+ enumValue.ToString()); + throw new ProtoException("No wire-value is mapped to the enum " + rhs + " at position " + writer.position64.ToString()); + } + + // general purpose serialization exception message + internal static Exception CreateException(ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException("writer"); + return new ProtoException("Invalid serialization operation with wire-type " + writer.wireType.ToString() + " at position " + writer.position64.ToString()); + } + + /// + /// Writes a boolean to the stream; supported wire-types: Variant, Fixed32, Fixed64 + /// + public static void WriteBoolean(bool value, ProtoWriter writer) + { + ProtoWriter.WriteUInt32(value ? (uint)1 : (uint)0, writer); + } + + /// + /// Copies any extension data stored for the instance to the underlying stream + /// + public static void AppendExtensionData(IExtensible instance, ProtoWriter writer) + { + if (instance == null) throw new ArgumentNullException(nameof(instance)); + if (writer == null) throw new ArgumentNullException(nameof(writer)); + // we expect the writer to be raw here; the extension data will have the + // header detail, so we'll copy it implicitly + if (writer.wireType != WireType.None) throw CreateException(writer); + + IExtension extn = instance.GetExtensionObject(false); + if (extn != null) + { + // unusually we *don't* want "using" here; the "finally" does that, with + // the extension object being responsible for disposal etc + Stream source = extn.BeginQuery(); + try + { + CopyRawFromStream(source, writer); + } + finally { extn.EndQuery(source); } + } + } + + private int packedFieldNumber; + /// + /// Used for packed encoding; indicates that the next field should be skipped rather than + /// a field header written. Note that the field number must match, else an exception is thrown + /// when the attempt is made to write the (incorrect) field. The wire-type is taken from the + /// subsequent call to WriteFieldHeader. Only primitive types can be packed. + /// + public static void SetPackedField(int fieldNumber, ProtoWriter writer) + { + if (fieldNumber <= 0) throw new ArgumentOutOfRangeException(nameof(fieldNumber)); + if (writer == null) throw new ArgumentNullException(nameof(writer)); + writer.packedFieldNumber = fieldNumber; + } + + /// + /// Used for packed encoding; explicitly reset the packed field marker; this is not required + /// if using StartSubItem/EndSubItem + /// + public static void ClearPackedField(int fieldNumber, ProtoWriter writer) + { + if (fieldNumber != writer.packedFieldNumber) + throw new InvalidOperationException("Field mismatch during packed encoding; expected " + writer.packedFieldNumber.ToString() + " but received " + fieldNumber.ToString()); + writer.packedFieldNumber = 0; + } + + /// + /// Used for packed encoding; writes the length prefix using fixed sizes rather than using + /// buffering. Only valid for fixed-32 and fixed-64 encoding. 
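SetPackedField, ClearPackedField and WritePackedPrefix above are the writer-side plumbing for packed repeated fields. From the public API the same path is reached by marking a repeated member as packed; a minimal sketch follows, assuming the standard ProtoContract/ProtoMember attribute surface that ships with this library (type names are illustrative, and the byte dump in the comment assumes default model settings).

using System;
using System.IO;
using ProtoBuf;

[ProtoContract]
class PackedExample
{
    // IsPacked routes the repeated field through the packed-prefix logic above:
    // one field header plus one length prefix for all items.
    [ProtoMember(1, IsPacked = true)]
    public int[] Values { get; set; }
}

static class PackedDemo
{
    static void Main()
    {
        var ms = new MemoryStream();
        Serializer.Serialize(ms, new PackedExample { Values = new[] { 1, 2, 300 } });
        // Expected layout: 0x0A (field 1, wire-type 2), 0x04 (payload length),
        // then 0x01 0x02 0xAC 0x02 for the three varint values.
        Console.WriteLine(BitConverter.ToString(ms.ToArray()));
    }
}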
+ /// + public static void WritePackedPrefix(int elementCount, WireType wireType, ProtoWriter writer) + { + if (writer.WireType != WireType.String) throw new InvalidOperationException("Invalid wire-type: " + writer.WireType); + if (elementCount < 0) throw new ArgumentOutOfRangeException(nameof(elementCount)); + ulong bytes; + switch (wireType) + { + // use long in case very large arrays are enabled + case WireType.Fixed32: bytes = ((ulong)elementCount) << 2; break; // x4 + case WireType.Fixed64: bytes = ((ulong)elementCount) << 3; break; // x8 + default: + throw new ArgumentOutOfRangeException(nameof(wireType), "Invalid wire-type: " + wireType); + } + WriteUInt64Variant(bytes, writer); + writer.wireType = WireType.None; + } + + internal string SerializeType(Type type) + { + return TypeModel.SerializeType(model, type); + } + + /// + /// Specifies a known root object to use during reference-tracked serialization + /// + public void SetRootObject(object value) + { + NetCache.SetKeyedObject(NetObjectCache.Root, value); + } + + /// + /// Writes a Type to the stream, using the model's DynamicTypeFormatting if appropriate; supported wire-types: String + /// + public static void WriteType(Type value, ProtoWriter writer) + { + if (writer == null) throw new ArgumentNullException(nameof(writer)); + WriteString(writer.SerializeType(value), writer); + } + } +} diff --git a/Runtime/Protobuf-net/ProtoWriter.cs.meta b/Runtime/Protobuf-net/ProtoWriter.cs.meta new file mode 100644 index 0000000..5b91b67 --- /dev/null +++ b/Runtime/Protobuf-net/ProtoWriter.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 63b2636e44dc3824ca2dbc35316e96ec +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/SerializationContext.cs b/Runtime/Protobuf-net/SerializationContext.cs new file mode 100644 index 0000000..80b76af --- /dev/null +++ b/Runtime/Protobuf-net/SerializationContext.cs @@ -0,0 +1,76 @@ +using System; + +namespace ProtoBuf +{ + /// + /// Additional information about a serialization operation + /// + public sealed class SerializationContext + { + private bool frozen; + internal void Freeze() { frozen = true; } + private void ThrowIfFrozen() { if (frozen) throw new InvalidOperationException("The serialization-context cannot be changed once it is in use"); } + private object context; + /// + /// Gets or sets a user-defined object containing additional information about this serialization/deserialization operation. + /// + public object Context + { + get { return context; } + set { if (context != value) { ThrowIfFrozen(); context = value; } } + } + + private static readonly SerializationContext @default; + + static SerializationContext() + { + @default = new SerializationContext(); + @default.Freeze(); + } + /// + /// A default SerializationContext, with minimal information. + /// + internal static SerializationContext Default => @default; +#if PLAT_BINARYFORMATTER + +#if !(COREFX || PROFILE259) + private System.Runtime.Serialization.StreamingContextStates state = System.Runtime.Serialization.StreamingContextStates.Persistence; + /// + /// Gets or sets the source or destination of the transmitted data. 
+ /// + public System.Runtime.Serialization.StreamingContextStates State + { + get { return state; } + set { if (state != value) { ThrowIfFrozen(); state = value; } } + } +#endif + /// + /// Convert a SerializationContext to a StreamingContext + /// + public static implicit operator System.Runtime.Serialization.StreamingContext(SerializationContext ctx) + { +#if COREFX + return new System.Runtime.Serialization.StreamingContext(); +#else + if (ctx == null) return new System.Runtime.Serialization.StreamingContext(System.Runtime.Serialization.StreamingContextStates.Persistence); + return new System.Runtime.Serialization.StreamingContext(ctx.state, ctx.context); +#endif + } + /// + /// Convert a StreamingContext to a SerializationContext + /// + public static implicit operator SerializationContext (System.Runtime.Serialization.StreamingContext ctx) + { + SerializationContext result = new SerializationContext(); + +#if !(COREFX || PROFILE259) + result.Context = ctx.Context; + result.State = ctx.State; +#endif + + return result; + } +#endif + } + +} diff --git a/Runtime/Protobuf-net/SerializationContext.cs.meta b/Runtime/Protobuf-net/SerializationContext.cs.meta new file mode 100644 index 0000000..9bd8dcc --- /dev/null +++ b/Runtime/Protobuf-net/SerializationContext.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 9361aaa524d95b14fbf398ba5bc075a1 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializer.cs b/Runtime/Protobuf-net/Serializer.cs new file mode 100644 index 0000000..8a4c38a --- /dev/null +++ b/Runtime/Protobuf-net/Serializer.cs @@ -0,0 +1,514 @@ +using ProtoBuf.Meta; +using System; +using System.IO; +using System.Collections.Generic; +using System.Reflection; + +namespace ProtoBuf +{ + /// + /// Provides protocol-buffer serialization capability for concrete, attributed types. This + /// is a *default* model, but custom serializer models are also supported. + /// + /// + /// Protocol-buffer serialization is a compact binary format, designed to take + /// advantage of sparse data and knowledge of specific data types; it is also + /// extensible, allowing a type to be deserialized / merged even if some data is + /// not recognised. + /// + public static class Serializer + { +#if !NO_RUNTIME + /// + /// Suggest a .proto definition for the given type + /// + /// The type to generate a .proto definition for + /// The .proto definition as a string + public static string GetProto() => GetProto(ProtoSyntax.Proto2); + + /// + /// Suggest a .proto definition for the given type + /// + /// The type to generate a .proto definition for + /// The .proto definition as a string + public static string GetProto(ProtoSyntax syntax) + { + return RuntimeTypeModel.Default.GetSchema(RuntimeTypeModel.Default.MapType(typeof(T)), syntax); + } + /// + /// Create a deep clone of the supplied instance; any sub-items are also cloned. + /// + public static T DeepClone(T instance) + { + return instance == null ? instance : (T)RuntimeTypeModel.Default.DeepClone(instance); + } + + /// + /// Applies a protocol-buffer stream to an existing instance. + /// + /// The type being merged. + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). 
+ /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public static T Merge(Stream source, T instance) + { + return (T)RuntimeTypeModel.Default.Deserialize(source, instance, typeof(T)); + } + + /// + /// Creates a new instance from a protocol-buffer stream + /// + /// The type to be created. + /// The binary stream to apply to the new instance (cannot be null). + /// A new, initialized instance. + public static T Deserialize(Stream source) + { + return (T)RuntimeTypeModel.Default.Deserialize(source, null, typeof(T)); + } + + /// + /// Creates a new instance from a protocol-buffer stream + /// + /// The type to be created. + /// The binary stream to apply to the new instance (cannot be null). + /// A new, initialized instance. + public static object Deserialize(Type type, Stream source) + { + return RuntimeTypeModel.Default.Deserialize(source, null, type); + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream. + /// + /// The existing instance to be serialized (cannot be null). + /// The destination stream to write to. + public static void Serialize(Stream destination, T instance) + { + if (instance != null) + { + RuntimeTypeModel.Default.Serialize(destination, instance); + } + } + + /// + /// Serializes a given instance and deserializes it as a different type; + /// this can be used to translate between wire-compatible objects (where + /// two .NET types represent the same data), or to promote/demote a type + /// through an inheritance hierarchy. + /// + /// No assumption of compatibility is made between the types. + /// The type of the object being copied. + /// The type of the new object to be created. + /// The existing instance to use as a template. + /// A new instane of type TNewType, with the data from TOldType. + public static TTo ChangeType(TFrom instance) + { + using (var ms = new MemoryStream()) + { + Serialize(ms, instance); + ms.Position = 0; + return Deserialize(ms); + } + } +#if PLAT_BINARYFORMATTER && !(COREFX || PROFILE259) + /// + /// Writes a protocol-buffer representation of the given instance to the supplied SerializationInfo. + /// + /// The type being serialized. + /// The existing instance to be serialized (cannot be null). + /// The destination SerializationInfo to write to. + public static void Serialize(System.Runtime.Serialization.SerializationInfo info, T instance) where T : class, System.Runtime.Serialization.ISerializable + { + Serialize(info, new System.Runtime.Serialization.StreamingContext(System.Runtime.Serialization.StreamingContextStates.Persistence), instance); + } + /// + /// Writes a protocol-buffer representation of the given instance to the supplied SerializationInfo. + /// + /// The type being serialized. + /// The existing instance to be serialized (cannot be null). + /// The destination SerializationInfo to write to. + /// Additional information about this serialization operation. + public static void Serialize(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context, T instance) where T : class, System.Runtime.Serialization.ISerializable + { + // note: also tried byte[]... 
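The Serialize/Deserialize/ChangeType members above cover the common round-trip cases; a minimal sketch with illustrative types follows.

using System;
using System.IO;
using ProtoBuf;

[ProtoContract] class OrderV1 { [ProtoMember(1)] public int Id { get; set; } }
[ProtoContract] class OrderV2
{
    [ProtoMember(1)] public int Id { get; set; }
    [ProtoMember(2)] public string Note { get; set; }
}

static class RoundTripDemo
{
    static void Main()
    {
        // Plain stream round-trip via the generic overloads above.
        using (var ms = new MemoryStream())
        {
            Serializer.Serialize(ms, new OrderV1 { Id = 7 });
            ms.Position = 0;
            var back = Serializer.Deserialize<OrderV1>(ms);
            Console.WriteLine(back.Id); // 7
        }

        // ChangeType: serialize as one wire-compatible type, deserialize as another.
        OrderV2 upgraded = Serializer.ChangeType<OrderV1, OrderV2>(new OrderV1 { Id = 7 });
        Console.WriteLine(upgraded.Id); // 7, Note stays null
    }
}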
it doesn't perform hugely well with either (compared to regular serialization) + if (info == null) throw new ArgumentNullException("info"); + if (instance == null) throw new ArgumentNullException("instance"); + if (instance.GetType() != typeof(T)) throw new ArgumentException("Incorrect type", "instance"); + using (MemoryStream ms = new MemoryStream()) + { + RuntimeTypeModel.Default.Serialize(ms, instance, context); + info.AddValue(ProtoBinaryField, ms.ToArray()); + } + } +#endif +#if PLAT_XMLSERIALIZER + /// + /// Writes a protocol-buffer representation of the given instance to the supplied XmlWriter. + /// + /// The type being serialized. + /// The existing instance to be serialized (cannot be null). + /// The destination XmlWriter to write to. + public static void Serialize(System.Xml.XmlWriter writer, T instance) where T : System.Xml.Serialization.IXmlSerializable + { + if (writer == null) throw new ArgumentNullException("writer"); + if (instance == null) throw new ArgumentNullException("instance"); + + using (MemoryStream ms = new MemoryStream()) + { + Serializer.Serialize(ms, instance); + writer.WriteBase64(Helpers.GetBuffer(ms), 0, (int)ms.Length); + } + } + /// + /// Applies a protocol-buffer from an XmlReader to an existing instance. + /// + /// The type being merged. + /// The existing instance to be modified (cannot be null). + /// The XmlReader containing the data to apply to the instance (cannot be null). + public static void Merge(System.Xml.XmlReader reader, T instance) where T : System.Xml.Serialization.IXmlSerializable + { + if (reader == null) throw new ArgumentNullException("reader"); + if (instance == null) throw new ArgumentNullException("instance"); + + const int LEN = 4096; + byte[] buffer = new byte[LEN]; + int read; + using (MemoryStream ms = new MemoryStream()) + { + int depth = reader.Depth; + while(reader.Read() && reader.Depth > depth) + { + if (reader.NodeType == System.Xml.XmlNodeType.Text) + { + while ((read = reader.ReadContentAsBase64(buffer, 0, LEN)) > 0) + { + ms.Write(buffer, 0, read); + } + if (reader.Depth <= depth) break; + } + } + ms.Position = 0; + Serializer.Merge(ms, instance); + } + } +#endif + + private const string ProtoBinaryField = "proto"; +#if PLAT_BINARYFORMATTER && !(COREFX || PROFILE259) + /// + /// Applies a protocol-buffer from a SerializationInfo to an existing instance. + /// + /// The type being merged. + /// The existing instance to be modified (cannot be null). + /// The SerializationInfo containing the data to apply to the instance (cannot be null). + public static void Merge(System.Runtime.Serialization.SerializationInfo info, T instance) where T : class, System.Runtime.Serialization.ISerializable + { + Merge(info, new System.Runtime.Serialization.StreamingContext(System.Runtime.Serialization.StreamingContextStates.Persistence), instance); + } + /// + /// Applies a protocol-buffer from a SerializationInfo to an existing instance. + /// + /// The type being merged. + /// The existing instance to be modified (cannot be null). + /// The SerializationInfo containing the data to apply to the instance (cannot be null). + /// Additional information about this serialization operation. + public static void Merge(System.Runtime.Serialization.SerializationInfo info, System.Runtime.Serialization.StreamingContext context, T instance) where T : class, System.Runtime.Serialization.ISerializable + { + // note: also tried byte[]... 
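The SerializationInfo overloads above (only compiled when PLAT_BINARYFORMATTER is defined) are intended for the usual ISerializable bridge pattern; a sketch under that assumption follows, with an illustrative type.

using System;
using System.Runtime.Serialization;
using ProtoBuf;

[Serializable, ProtoContract]
class BridgedDto : ISerializable
{
    [ProtoMember(1)] public int Id { get; set; }

    public BridgedDto() { }

    // The formatter calls this ctor; the protobuf payload written by
    // GetObjectData below is pulled back out of the SerializationInfo.
    protected BridgedDto(SerializationInfo info, StreamingContext context)
    {
        Serializer.Merge(info, this);
    }

    public void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        Serializer.Serialize(info, this);
    }
}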
it doesn't perform hugely well with either (compared to regular serialization) + if (info == null) throw new ArgumentNullException("info"); + if (instance == null) throw new ArgumentNullException("instance"); + if (instance.GetType() != typeof(T)) throw new ArgumentException("Incorrect type", "instance"); + + byte[] buffer = (byte[])info.GetValue(ProtoBinaryField, typeof(byte[])); + using (MemoryStream ms = new MemoryStream(buffer)) + { + T result = (T)RuntimeTypeModel.Default.Deserialize(ms, instance, typeof(T), context); + if (!ReferenceEquals(result, instance)) + { + throw new ProtoException("Deserialization changed the instance; cannot succeed."); + } + } + } +#endif + + /// + /// Precompiles the serializer for a given type. + /// + public static void PrepareSerializer() + { + NonGeneric.PrepareSerializer(typeof(T)); + } + +#if PLAT_BINARYFORMATTER && !(COREFX || PROFILE259) + /// + /// Creates a new IFormatter that uses protocol-buffer [de]serialization. + /// + /// The type of object to be [de]deserialized by the formatter. + /// A new IFormatter to be used during [de]serialization. + public static System.Runtime.Serialization.IFormatter CreateFormatter() + { + return RuntimeTypeModel.Default.CreateFormatter(typeof(T)); + } +#endif + /// + /// Reads a sequence of consecutive length-prefixed items from a stream, using + /// either base-128 or fixed-length prefixes. Base-128 prefixes with a tag + /// are directly comparable to serializing multiple items in succession + /// (use the tag to emulate the implicit behavior + /// when serializing a list/array). When a tag is + /// specified, any records with different tags are silently omitted. The + /// tag is ignored. The tag is ignored for fixed-length prefixes. + /// + /// The type of object to deserialize. + /// The binary stream containing the serialized records. + /// The prefix style used in the data. + /// The tag of records to return (if non-positive, then no tag is + /// expected and all records are returned). + /// The sequence of deserialized objects. + public static IEnumerable DeserializeItems(Stream source, PrefixStyle style, int fieldNumber) + { + return RuntimeTypeModel.Default.DeserializeItems(source, style, fieldNumber); + } + + /// + /// Creates a new instance from a protocol-buffer stream that has a length-prefix + /// on data (to assist with network IO). + /// + /// The type to be created. + /// The binary stream to apply to the new instance (cannot be null). + /// How to encode the length prefix. + /// A new, initialized instance. + public static T DeserializeWithLengthPrefix(Stream source, PrefixStyle style) + { + return DeserializeWithLengthPrefix(source, style, 0); + } + + /// + /// Creates a new instance from a protocol-buffer stream that has a length-prefix + /// on data (to assist with network IO). + /// + /// The type to be created. + /// The binary stream to apply to the new instance (cannot be null). + /// How to encode the length prefix. + /// The expected tag of the item (only used with base-128 prefix style). + /// A new, initialized instance. + public static T DeserializeWithLengthPrefix(Stream source, PrefixStyle style, int fieldNumber) + { + RuntimeTypeModel model = RuntimeTypeModel.Default; + return (T)model.DeserializeWithLengthPrefix(source, null, model.MapType(typeof(T)), style, fieldNumber); + } + + /// + /// Applies a protocol-buffer stream to an existing instance, using length-prefixed + /// data - useful with network IO. + /// + /// The type being merged. 
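DeserializeItems pairs with SerializeWithLengthPrefix for framed streams such as sockets; a minimal sketch with an illustrative message type follows.

using System;
using System.IO;
using ProtoBuf;

[ProtoContract]
class Msg { [ProtoMember(1)] public int Seq { get; set; } }

static class FramingDemo
{
    static void Main()
    {
        var ms = new MemoryStream();

        // Base128 with field number 1 writes each record as "field 1, length, payload",
        // which is exactly what DeserializeItems below expects to walk back out.
        for (int i = 0; i < 3; i++)
            Serializer.SerializeWithLengthPrefix(ms, new Msg { Seq = i }, PrefixStyle.Base128, 1);

        ms.Position = 0;
        foreach (Msg m in Serializer.DeserializeItems<Msg>(ms, PrefixStyle.Base128, 1))
            Console.WriteLine(m.Seq);   // 0, 1, 2
    }
}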
+ /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// How to encode the length prefix. + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public static T MergeWithLengthPrefix(Stream source, T instance, PrefixStyle style) + { + RuntimeTypeModel model = RuntimeTypeModel.Default; + return (T)model.DeserializeWithLengthPrefix(source, instance, model.MapType(typeof(T)), style, 0); + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream, + /// with a length-prefix. This is useful for socket programming, + /// as DeserializeWithLengthPrefix/MergeWithLengthPrefix can be used to read the single object back + /// from an ongoing stream. + /// + /// The type being serialized. + /// The existing instance to be serialized (cannot be null). + /// How to encode the length prefix. + /// The destination stream to write to. + public static void SerializeWithLengthPrefix(Stream destination, T instance, PrefixStyle style) + { + SerializeWithLengthPrefix(destination, instance, style, 0); + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream, + /// with a length-prefix. This is useful for socket programming, + /// as DeserializeWithLengthPrefix/MergeWithLengthPrefix can be used to read the single object back + /// from an ongoing stream. + /// + /// The type being serialized. + /// The existing instance to be serialized (cannot be null). + /// How to encode the length prefix. + /// The destination stream to write to. + /// The tag used as a prefix to each record (only used with base-128 style prefixes). + public static void SerializeWithLengthPrefix(Stream destination, T instance, PrefixStyle style, int fieldNumber) + { + RuntimeTypeModel model = RuntimeTypeModel.Default; + model.SerializeWithLengthPrefix(destination, instance, model.MapType(typeof(T)), style, fieldNumber); + } + + /// Indicates the number of bytes expected for the next message. + /// The stream containing the data to investigate for a length. + /// The algorithm used to encode the length. + /// The length of the message, if it could be identified. + /// True if a length could be obtained, false otherwise. + public static bool TryReadLengthPrefix(Stream source, PrefixStyle style, out int length) + { + length = ProtoReader.ReadLengthPrefix(source, false, style, out int fieldNumber, out int bytesRead); + return bytesRead > 0; + } + + /// Indicates the number of bytes expected for the next message. + /// The buffer containing the data to investigate for a length. + /// The offset of the first byte to read from the buffer. + /// The number of bytes to read from the buffer. + /// The algorithm used to encode the length. + /// The length of the message, if it could be identified. + /// True if a length could be obtained, false otherwise. + public static bool TryReadLengthPrefix(byte[] buffer, int index, int count, PrefixStyle style, out int length) + { + using (Stream source = new MemoryStream(buffer, index, count)) + { + return TryReadLengthPrefix(source, style, out length); + } + } +#endif + /// + /// The field number that is used as a default when serializing/deserializing a list of objects. + /// The data is treated as repeated message with field number 1. 
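TryReadLengthPrefix above lets a caller learn the next frame's size before pulling the body, which is handy for sizing receive buffers; a sketch using the Fixed32 prefix style follows (the Packet type is illustrative).

using System;
using System.IO;
using ProtoBuf;

[ProtoContract]
class Packet { [ProtoMember(1)] public string Body { get; set; } }

static class PrefixPeekDemo
{
    static void Main()
    {
        var ms = new MemoryStream();
        Serializer.SerializeWithLengthPrefix(ms, new Packet { Body = "hello" }, PrefixStyle.Fixed32);
        ms.Position = 0;

        // Read the frame length first, then read exactly that many payload bytes.
        if (Serializer.TryReadLengthPrefix(ms, PrefixStyle.Fixed32, out int len))
        {
            byte[] payload = new byte[len];
            ms.Read(payload, 0, len);
            var packet = Serializer.Deserialize<Packet>(new MemoryStream(payload));
            Console.WriteLine($"{len} bytes -> {packet.Body}");
        }
    }
}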
+ /// + public const int ListItemTag = 1; + + +#if !NO_RUNTIME + /// + /// Provides non-generic access to the default serializer. + /// + public static class NonGeneric + { + /// + /// Create a deep clone of the supplied instance; any sub-items are also cloned. + /// + public static object DeepClone(object instance) + { + return instance == null ? null : RuntimeTypeModel.Default.DeepClone(instance); + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream. + /// + /// The existing instance to be serialized (cannot be null). + /// The destination stream to write to. + public static void Serialize(Stream dest, object instance) + { + if (instance != null) + { + RuntimeTypeModel.Default.Serialize(dest, instance); + } + } + + /// + /// Creates a new instance from a protocol-buffer stream + /// + /// The type to be created. + /// The binary stream to apply to the new instance (cannot be null). + /// A new, initialized instance. + public static object Deserialize(Type type, Stream source) + { + return RuntimeTypeModel.Default.Deserialize(source, null, type); + } + + /// Applies a protocol-buffer stream to an existing instance. + /// The existing instance to be modified (cannot be null). + /// The binary stream to apply to the instance (cannot be null). + /// The updated instance + public static object Merge(Stream source, object instance) + { + if (instance == null) throw new ArgumentNullException(nameof(instance)); + return RuntimeTypeModel.Default.Deserialize(source, instance, instance.GetType(), null); + } + + /// + /// Writes a protocol-buffer representation of the given instance to the supplied stream, + /// with a length-prefix. This is useful for socket programming, + /// as DeserializeWithLengthPrefix/MergeWithLengthPrefix can be used to read the single object back + /// from an ongoing stream. + /// + /// The existing instance to be serialized (cannot be null). + /// How to encode the length prefix. + /// The destination stream to write to. + /// The tag used as a prefix to each record (only used with base-128 style prefixes). + public static void SerializeWithLengthPrefix(Stream destination, object instance, PrefixStyle style, int fieldNumber) + { + if (instance == null) throw new ArgumentNullException(nameof(instance)); + RuntimeTypeModel model = RuntimeTypeModel.Default; + model.SerializeWithLengthPrefix(destination, instance, model.MapType(instance.GetType()), style, fieldNumber); + } + /// + /// Applies a protocol-buffer stream to an existing instance (or null), using length-prefixed + /// data - useful with network IO. + /// + /// The existing instance to be modified (can be null). + /// The binary stream to apply to the instance (cannot be null). + /// How to encode the length prefix. + /// Used to resolve types on a per-field basis. + /// The updated instance; this may be different to the instance argument if + /// either the original instance was null, or the stream defines a known sub-type of the + /// original instance. + public static bool TryDeserializeWithLengthPrefix(Stream source, PrefixStyle style, TypeResolver resolver, out object value) + { + value = RuntimeTypeModel.Default.DeserializeWithLengthPrefix(source, null, null, style, 0, resolver); + return value != null; + } + + /// + /// Indicates whether the supplied type is explicitly modelled by the model + /// + public static bool CanSerialize(Type type) => RuntimeTypeModel.Default.IsDefined(type); + + /// + /// Precompiles the serializer for a given type. 
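The NonGeneric helpers together with the TypeResolver delegate (declared a little further down) support heterogeneous frames, where the length-prefix field number identifies the message type; a minimal sketch with illustrative types follows.

using System;
using System.IO;
using ProtoBuf;

[ProtoContract] class Ping { [ProtoMember(1)] public int Id { get; set; } }
[ProtoContract] class Pong { [ProtoMember(1)] public int Id { get; set; } }

static class NonGenericDemo
{
    static void Main()
    {
        var ms = new MemoryStream();
        // Tag each frame with a field number that identifies the message type.
        Serializer.NonGeneric.SerializeWithLengthPrefix(ms, new Ping { Id = 1 }, PrefixStyle.Base128, 1);
        Serializer.NonGeneric.SerializeWithLengthPrefix(ms, new Pong { Id = 2 }, PrefixStyle.Base128, 2);
        ms.Position = 0;

        // The resolver maps that field number back to a CLR type on read.
        Serializer.TypeResolver resolver = field => field == 1 ? typeof(Ping) : typeof(Pong);
        while (Serializer.NonGeneric.TryDeserializeWithLengthPrefix(ms, PrefixStyle.Base128, resolver, out object msg))
            Console.WriteLine(msg.GetType().Name);   // Ping, Pong
    }
}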
+ /// + public static void PrepareSerializer(Type t) + { +#if FEAT_COMPILER + RuntimeTypeModel model = RuntimeTypeModel.Default; + model[model.MapType(t)].CompileInPlace(); +#endif + } + } + + /// + /// Global switches that change the behavior of protobuf-net + /// + public static class GlobalOptions + { + /// + /// + /// + [Obsolete("Please use RuntimeTypeModel.Default.InferTagFromNameDefault instead (or on a per-model basis)", false)] + public static bool InferTagFromName + { + get { return RuntimeTypeModel.Default.InferTagFromNameDefault; } + set { RuntimeTypeModel.Default.InferTagFromNameDefault = value; } + } + } +#endif + /// + /// Maps a field-number to a type + /// + public delegate Type TypeResolver(int fieldNumber); + + /// + /// Releases any internal buffers that have been reserved for efficiency; this does not affect any serialization + /// operations; simply: it can be used (optionally) to release the buffers for garbage collection (at the expense + /// of having to re-allocate a new buffer for the next operation, rather than re-use prior buffers). + /// + public static void FlushPool() + { + BufferPool.Flush(); + } + } +} diff --git a/Runtime/Protobuf-net/Serializer.cs.meta b/Runtime/Protobuf-net/Serializer.cs.meta new file mode 100644 index 0000000..63cf57d --- /dev/null +++ b/Runtime/Protobuf-net/Serializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: dbd7fc6a1f1a0e34b8a1bce7e93c4f61 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers.meta b/Runtime/Protobuf-net/Serializers.meta new file mode 100644 index 0000000..569acba --- /dev/null +++ b/Runtime/Protobuf-net/Serializers.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 90bd17a736284764ca22da41661472de +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/ArrayDecorator.cs b/Runtime/Protobuf-net/Serializers/ArrayDecorator.cs new file mode 100644 index 0000000..cad005f --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ArrayDecorator.cs @@ -0,0 +1,310 @@ +#if !NO_RUNTIME +using System; +using System.Collections; +using System.Reflection; +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class ArrayDecorator : ProtoDecoratorBase + { + private readonly int fieldNumber; + private const byte + OPTIONS_WritePacked = 1, + OPTIONS_OverwriteList = 2, + OPTIONS_SupportNull = 4; + private readonly byte options; + private readonly WireType packedWireType; + public ArrayDecorator(TypeModel model, IProtoSerializer tail, int fieldNumber, bool writePacked, WireType packedWireType, Type arrayType, bool overwriteList, bool supportNull) + : base(tail) + { + Helpers.DebugAssert(arrayType != null, "arrayType should be non-null"); + Helpers.DebugAssert(arrayType.IsArray && arrayType.GetArrayRank() == 1, "should be single-dimension array; " + arrayType.FullName); + this.itemType = arrayType.GetElementType(); + Type underlyingItemType = supportNull ? itemType : (Helpers.GetUnderlyingType(itemType) ?? 
itemType); + + Helpers.DebugAssert(underlyingItemType == Tail.ExpectedType + || (Tail.ExpectedType == model.MapType(typeof(object)) && !Helpers.IsValueType(underlyingItemType)), "invalid tail"); + Helpers.DebugAssert(Tail.ExpectedType != model.MapType(typeof(byte)), "Should have used BlobSerializer"); + if ((writePacked || packedWireType != WireType.None) && fieldNumber <= 0) throw new ArgumentOutOfRangeException("fieldNumber"); + if (!ListDecorator.CanPack(packedWireType)) + { + if (writePacked) throw new InvalidOperationException("Only simple data-types can use packed encoding"); + packedWireType = WireType.None; + } + this.fieldNumber = fieldNumber; + this.packedWireType = packedWireType; + if (writePacked) options |= OPTIONS_WritePacked; + if (overwriteList) options |= OPTIONS_OverwriteList; + if (supportNull) options |= OPTIONS_SupportNull; + this.arrayType = arrayType; + } + readonly Type arrayType, itemType; // this is, for example, typeof(int[]) + public override Type ExpectedType { get { return arrayType; } } + public override bool RequiresOldValue { get { return AppendToCollection; } } + public override bool ReturnsValue { get { return true; } } + private bool CanUsePackedPrefix() => CanUsePackedPrefix(packedWireType, itemType); + + internal static bool CanUsePackedPrefix(WireType packedWireType, Type itemType) + { + // needs to be a suitably simple type *and* be definitely not nullable + switch (packedWireType) + { + case WireType.Fixed32: + case WireType.Fixed64: + break; + default: + return false; // nope + } + if (!Helpers.IsValueType(itemType)) return false; + return Helpers.GetUnderlyingType(itemType) == null; + } + +#if FEAT_COMPILER + protected override void EmitWrite(ProtoBuf.Compiler.CompilerContext ctx, ProtoBuf.Compiler.Local valueFrom) + { + // int i and T[] arr + using (Compiler.Local arr = ctx.GetLocalWithValue(arrayType, valueFrom)) + using (Compiler.Local i = new ProtoBuf.Compiler.Local(ctx, ctx.MapType(typeof(int)))) + { + bool writePacked = (options & OPTIONS_WritePacked) != 0; + bool fixedLengthPacked = writePacked && CanUsePackedPrefix(); + + using (Compiler.Local token = (writePacked && !fixedLengthPacked) ? 
new Compiler.Local(ctx, ctx.MapType(typeof(SubItemToken))) : null) + { + Type mappedWriter = ctx.MapType(typeof(ProtoWriter)); + if (writePacked) + { + ctx.LoadValue(fieldNumber); + ctx.LoadValue((int)WireType.String); + ctx.LoadReaderWriter(); + ctx.EmitCall(mappedWriter.GetMethod("WriteFieldHeader")); + + if (fixedLengthPacked) + { + // write directly - no need for buffering + ctx.LoadLength(arr, false); + ctx.LoadValue((int)packedWireType); + ctx.LoadReaderWriter(); + ctx.EmitCall(mappedWriter.GetMethod("WritePackedPrefix")); + } + else + { + ctx.LoadValue(arr); + ctx.LoadReaderWriter(); + ctx.EmitCall(mappedWriter.GetMethod("StartSubItem")); + ctx.StoreValue(token); + } + ctx.LoadValue(fieldNumber); + ctx.LoadReaderWriter(); + ctx.EmitCall(mappedWriter.GetMethod("SetPackedField")); + } + EmitWriteArrayLoop(ctx, i, arr); + + if (writePacked) + { + if (fixedLengthPacked) + { + ctx.LoadValue(fieldNumber); + ctx.LoadReaderWriter(); + ctx.EmitCall(mappedWriter.GetMethod("ClearPackedField")); + } + else + { + ctx.LoadValue(token); + ctx.LoadReaderWriter(); + ctx.EmitCall(mappedWriter.GetMethod("EndSubItem")); + } + } + } + } + } + + private void EmitWriteArrayLoop(Compiler.CompilerContext ctx, Compiler.Local i, Compiler.Local arr) + { + // i = 0 + ctx.LoadValue(0); + ctx.StoreValue(i); + + // range test is last (to minimise branches) + Compiler.CodeLabel loopTest = ctx.DefineLabel(), processItem = ctx.DefineLabel(); + ctx.Branch(loopTest, false); + ctx.MarkLabel(processItem); + + // {...} + ctx.LoadArrayValue(arr, i); + if (SupportNull) + { + Tail.EmitWrite(ctx, null); + } + else + { + ctx.WriteNullCheckedTail(itemType, Tail, null); + } + + // i++ + ctx.LoadValue(i); + ctx.LoadValue(1); + ctx.Add(); + ctx.StoreValue(i); + + // i < arr.Length + ctx.MarkLabel(loopTest); + ctx.LoadValue(i); + ctx.LoadLength(arr, false); + ctx.BranchIfLess(processItem, false); + } +#endif + private bool AppendToCollection => (options & OPTIONS_OverwriteList) == 0; + + private bool SupportNull { get { return (options & OPTIONS_SupportNull) != 0; } } + + public override void Write(object value, ProtoWriter dest) + { + IList arr = (IList)value; + int len = arr.Count; + SubItemToken token; + bool writePacked = (options & OPTIONS_WritePacked) != 0; + bool fixedLengthPacked = writePacked && CanUsePackedPrefix(); + + if (writePacked) + { + ProtoWriter.WriteFieldHeader(fieldNumber, WireType.String, dest); + + if (fixedLengthPacked) + { + ProtoWriter.WritePackedPrefix(arr.Count, packedWireType, dest); + token = new SubItemToken(); // default + } + else + { + token = ProtoWriter.StartSubItem(value, dest); + } + ProtoWriter.SetPackedField(fieldNumber, dest); + } + else + { + token = new SubItemToken(); // default + } + bool checkForNull = !SupportNull; + for (int i = 0; i < len; i++) + { + object obj = arr[i]; + if (checkForNull && obj == null) { throw new NullReferenceException(); } + Tail.Write(obj, dest); + } + if (writePacked) + { + if (fixedLengthPacked) + { + ProtoWriter.ClearPackedField(fieldNumber, dest); + } + else + { + ProtoWriter.EndSubItem(token, dest); + } + } + } + public override object Read(object value, ProtoReader source) + { + int field = source.FieldNumber; + BasicList list = new BasicList(); + if (packedWireType != WireType.None && source.WireType == WireType.String) + { + SubItemToken token = ProtoReader.StartSubItem(source); + while (ProtoReader.HasSubValue(packedWireType, source)) + { + list.Add(Tail.Read(null, source)); + } + ProtoReader.EndSubItem(token, source); + } + else + { + do + { + 
list.Add(Tail.Read(null, source)); + } while (source.TryReadFieldHeader(field)); + } + int oldLen = AppendToCollection ? ((value == null ? 0 : ((Array)value).Length)) : 0; + Array result = Array.CreateInstance(itemType, oldLen + list.Count); + if (oldLen != 0) ((Array)value).CopyTo(result, 0); + list.CopyTo(result, oldLen); + return result; + } + +#if FEAT_COMPILER + protected override void EmitRead(ProtoBuf.Compiler.CompilerContext ctx, ProtoBuf.Compiler.Local valueFrom) + { + Type listType; + listType = ctx.MapType(typeof(System.Collections.Generic.List<>)).MakeGenericType(itemType); + Type expected = ExpectedType; + using (Compiler.Local oldArr = AppendToCollection ? ctx.GetLocalWithValue(expected, valueFrom) : null) + using (Compiler.Local newArr = new Compiler.Local(ctx, expected)) + using (Compiler.Local list = new Compiler.Local(ctx, listType)) + { + ctx.EmitCtor(listType); + ctx.StoreValue(list); + ListDecorator.EmitReadList(ctx, list, Tail, listType.GetMethod("Add"), packedWireType, false); + + // leave this "using" here, as it can share the "FieldNumber" local with EmitReadList + using (Compiler.Local oldLen = AppendToCollection ? new ProtoBuf.Compiler.Local(ctx, ctx.MapType(typeof(int))) : null) + { + Type[] copyToArrayInt32Args = new Type[] { ctx.MapType(typeof(Array)), ctx.MapType(typeof(int)) }; + + if (AppendToCollection) + { + ctx.LoadLength(oldArr, true); + ctx.CopyValue(); + ctx.StoreValue(oldLen); + + ctx.LoadAddress(list, listType); + ctx.LoadValue(listType.GetProperty("Count")); + ctx.Add(); + ctx.CreateArray(itemType, null); // length is on the stack + ctx.StoreValue(newArr); + + ctx.LoadValue(oldLen); + Compiler.CodeLabel nothingToCopy = ctx.DefineLabel(); + ctx.BranchIfFalse(nothingToCopy, true); + ctx.LoadValue(oldArr); + ctx.LoadValue(newArr); + ctx.LoadValue(0); // index in target + + ctx.EmitCall(expected.GetMethod("CopyTo", copyToArrayInt32Args)); + ctx.MarkLabel(nothingToCopy); + + ctx.LoadValue(list); + ctx.LoadValue(newArr); + ctx.LoadValue(oldLen); + + } + else + { + ctx.LoadAddress(list, listType); + ctx.LoadValue(listType.GetProperty("Count")); + ctx.CreateArray(itemType, null); + ctx.StoreValue(newArr); + + ctx.LoadAddress(list, listType); + ctx.LoadValue(newArr); + ctx.LoadValue(0); + } + + copyToArrayInt32Args[0] = expected; // // prefer: CopyTo(T[], int) + MethodInfo copyTo = listType.GetMethod("CopyTo", copyToArrayInt32Args); + if (copyTo == null) + { // fallback: CopyTo(Array, int) + copyToArrayInt32Args[1] = ctx.MapType(typeof(Array)); + copyTo = listType.GetMethod("CopyTo", copyToArrayInt32Args); + } + ctx.EmitCall(copyTo); + } + ctx.LoadValue(newArr); + } + + + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/ArrayDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/ArrayDecorator.cs.meta new file mode 100644 index 0000000..6958590 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ArrayDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 3689dde3ac5fd544a9e66158c9713872 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/BlobSerializer.cs b/Runtime/Protobuf-net/Serializers/BlobSerializer.cs new file mode 100644 index 0000000..40b2b89 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/BlobSerializer.cs @@ -0,0 +1,59 @@ +#if !NO_RUNTIME +using System; +#if COREFX +using System.Reflection; +#endif +#if 
FEAT_COMPILER +using System.Reflection.Emit; +#endif + +namespace ProtoBuf.Serializers +{ + sealed class BlobSerializer : IProtoSerializer + { + public Type ExpectedType { get { return expectedType; } } + + static readonly Type expectedType = typeof(byte[]); + + public BlobSerializer(ProtoBuf.Meta.TypeModel model, bool overwriteList) + { + this.overwriteList = overwriteList; + } + + private readonly bool overwriteList; + + public object Read(object value, ProtoReader source) + { + return ProtoReader.AppendBytes(overwriteList ? null : (byte[])value, source); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteBytes((byte[])value, dest); + } + + bool IProtoSerializer.RequiresOldValue { get { return !overwriteList; } } + bool IProtoSerializer.ReturnsValue { get { return true; } } +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteBytes", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + if (overwriteList) + { + ctx.LoadNullRef(); + } + else + { + ctx.LoadValue(valueFrom); + } + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)) + .GetMethod("AppendBytes")); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/BlobSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/BlobSerializer.cs.meta new file mode 100644 index 0000000..49dd403 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/BlobSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: c04427a4647d6314e82d8a63882dcb8b +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/BooleanSerializer.cs b/Runtime/Protobuf-net/Serializers/BooleanSerializer.cs new file mode 100644 index 0000000..c64886a --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/BooleanSerializer.cs @@ -0,0 +1,41 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class BooleanSerializer : IProtoSerializer + { + static readonly Type expectedType = typeof(bool); + + public BooleanSerializer(ProtoBuf.Meta.TypeModel model) { } + + public Type ExpectedType => expectedType; + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteBoolean((bool)value, dest); + } + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadBoolean(); + } + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteBoolean", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadBoolean", ExpectedType); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/BooleanSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/BooleanSerializer.cs.meta new file mode 100644 index 0000000..f982384 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/BooleanSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 8b73f749f97802947812dc66867ed1f5 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: 
{instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/ByteSerializer.cs b/Runtime/Protobuf-net/Serializers/ByteSerializer.cs new file mode 100644 index 0000000..e44a83c --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ByteSerializer.cs @@ -0,0 +1,42 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class ByteSerializer : IProtoSerializer + { + public Type ExpectedType { get { return expectedType; } } + + static readonly Type expectedType = typeof(byte); + + public ByteSerializer(ProtoBuf.Meta.TypeModel model) { } + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteByte((byte)value, dest); + } + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadByte(); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteByte", valueFrom); + } + + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadByte", ExpectedType); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/ByteSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/ByteSerializer.cs.meta new file mode 100644 index 0000000..23a58b1 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ByteSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: c17779da4eb6b1d489531294afcb2a32 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/CharSerializer.cs b/Runtime/Protobuf-net/Serializers/CharSerializer.cs new file mode 100644 index 0000000..3bc30d0 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/CharSerializer.cs @@ -0,0 +1,32 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class CharSerializer : UInt16Serializer + { + static readonly Type expectedType = typeof(char); + + public CharSerializer(ProtoBuf.Meta.TypeModel model) : base(model) + { + + } + + public override Type ExpectedType => expectedType; + + public override void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteUInt16((ushort)(char)value, dest); + } + + public override object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return (char)source.ReadUInt16(); + } + + // no need for any special IL here; ushort and char are + // interchangeable as long as there is no boxing/unboxing + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/CharSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/CharSerializer.cs.meta new file mode 100644 index 0000000..0424efc --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/CharSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 526090cb730f087469b7f20948f4932a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/CompiledSerializer.cs b/Runtime/Protobuf-net/Serializers/CompiledSerializer.cs new file mode 100644 index 0000000..1ec3027 --- /dev/null +++ 
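BooleanSerializer, ByteSerializer and CharSerializer above all funnel into the writer's UInt32/UInt16 varint paths, so bool, byte and char members end up as ordinary varint fields (char as its UTF-16 code unit). A sketch with an illustrative type follows; the byte dump in the comment assumes default model settings.

using System;
using System.IO;
using ProtoBuf;

[ProtoContract]
class Flags
{
    [ProtoMember(1)] public bool Enabled { get; set; }
    [ProtoMember(2)] public byte Level { get; set; }
    [ProtoMember(3)] public char Initial { get; set; }   // stored as its UTF-16 code unit
}

static class PrimitiveDemo
{
    static void Main()
    {
        var ms = new MemoryStream();
        Serializer.Serialize(ms, new Flags { Enabled = true, Level = 5, Initial = 'A' });
        // All three members are plain varint fields:
        // 08 01 (bool), 10 05 (byte), 18 41 ('A' = 65)
        Console.WriteLine(BitConverter.ToString(ms.ToArray()));

        ms.Position = 0;
        Console.WriteLine(Serializer.Deserialize<Flags>(ms).Initial); // A
    }
}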
b/Runtime/Protobuf-net/Serializers/CompiledSerializer.cs @@ -0,0 +1,88 @@ +#if FEAT_COMPILER +using System; +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class CompiledSerializer : IProtoTypeSerializer + { + bool IProtoTypeSerializer.HasCallbacks(TypeModel.CallbackType callbackType) + { + return head.HasCallbacks(callbackType); // these routes only used when bits of the model not compiled + } + + bool IProtoTypeSerializer.CanCreateInstance() + { + return head.CanCreateInstance(); + } + + object IProtoTypeSerializer.CreateInstance(ProtoReader source) + { + return head.CreateInstance(source); + } + + public void Callback(object value, TypeModel.CallbackType callbackType, SerializationContext context) + { + head.Callback(value, callbackType, context); // these routes only used when bits of the model not compiled + } + + public static CompiledSerializer Wrap(IProtoTypeSerializer head, TypeModel model) + { + CompiledSerializer result = head as CompiledSerializer; + if (result == null) + { + result = new CompiledSerializer(head, model); + Helpers.DebugAssert(((IProtoTypeSerializer)result).ExpectedType == head.ExpectedType); + } + return result; + } + + private readonly IProtoTypeSerializer head; + private readonly Compiler.ProtoSerializer serializer; + private readonly Compiler.ProtoDeserializer deserializer; + + private CompiledSerializer(IProtoTypeSerializer head, TypeModel model) + { + this.head = head; + serializer = Compiler.CompilerContext.BuildSerializer(head, model); + deserializer = Compiler.CompilerContext.BuildDeserializer(head, model); + } + + bool IProtoSerializer.RequiresOldValue => head.RequiresOldValue; + + bool IProtoSerializer.ReturnsValue => head.ReturnsValue; + + Type IProtoSerializer.ExpectedType => head.ExpectedType; + + void IProtoSerializer.Write(object value, ProtoWriter dest) + { + serializer(value, dest); + } + + object IProtoSerializer.Read(object value, ProtoReader source) + { + return deserializer(value, source); + } + + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + head.EmitWrite(ctx, valueFrom); + } + + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + head.EmitRead(ctx, valueFrom); + } + + void IProtoTypeSerializer.EmitCallback(Compiler.CompilerContext ctx, Compiler.Local valueFrom, TypeModel.CallbackType callbackType) + { + head.EmitCallback(ctx, valueFrom, callbackType); + } + + void IProtoTypeSerializer.EmitCreateInstance(Compiler.CompilerContext ctx) + { + head.EmitCreateInstance(ctx); + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/CompiledSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/CompiledSerializer.cs.meta new file mode 100644 index 0000000..ddef875 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/CompiledSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 90821da5568834a4682d1a42d7f66963 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/DateTimeSerializer.cs b/Runtime/Protobuf-net/Serializers/DateTimeSerializer.cs new file mode 100644 index 0000000..9755df9 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/DateTimeSerializer.cs @@ -0,0 +1,65 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + internal sealed class DateTimeSerializer : 
IProtoSerializer + { + private static readonly Type expectedType = typeof(DateTime); + + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + bool IProtoSerializer.ReturnsValue => true; + + private readonly bool includeKind, wellKnown; + + public DateTimeSerializer(DataFormat dataFormat, ProtoBuf.Meta.TypeModel model) + { + wellKnown = dataFormat == DataFormat.WellKnown; + includeKind = model?.SerializeDateTimeKind() == true; + } + + public object Read(object value, ProtoReader source) + { + if (wellKnown) + { + return BclHelpers.ReadTimestamp(source); + } + else + { + Helpers.DebugAssert(value == null); // since replaces + return BclHelpers.ReadDateTime(source); + } + } + + public void Write(object value, ProtoWriter dest) + { + if (wellKnown) + BclHelpers.WriteTimestamp((DateTime)value, dest); + else if (includeKind) + BclHelpers.WriteDateTimeWithKind((DateTime)value, dest); + else + BclHelpers.WriteDateTime((DateTime)value, dest); + } +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitWrite(ctx.MapType(typeof(BclHelpers)), + wellKnown ? nameof(BclHelpers.WriteTimestamp) + : includeKind ? nameof(BclHelpers.WriteDateTimeWithKind) : nameof(BclHelpers.WriteDateTime), valueFrom); + } + + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local entity) + { + if (wellKnown) ctx.LoadValue(entity); + ctx.EmitBasicRead(ctx.MapType(typeof(BclHelpers)), + wellKnown ? nameof(BclHelpers.ReadTimestamp) : nameof(BclHelpers.ReadDateTime), + ExpectedType); + } +#endif + + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/DateTimeSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/DateTimeSerializer.cs.meta new file mode 100644 index 0000000..6757f0c --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/DateTimeSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: dfba0a8c252b2e54c96478c9e690c7d3 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/DecimalSerializer.cs b/Runtime/Protobuf-net/Serializers/DecimalSerializer.cs new file mode 100644 index 0000000..1edc621 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/DecimalSerializer.cs @@ -0,0 +1,42 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class DecimalSerializer : IProtoSerializer + { + static readonly Type expectedType = typeof(decimal); + + public DecimalSerializer(ProtoBuf.Meta.TypeModel model) { } + + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return BclHelpers.ReadDecimal(source); + } + + public void Write(object value, ProtoWriter dest) + { + BclHelpers.WriteDecimal((decimal)value, dest); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitWrite(ctx.MapType(typeof(BclHelpers)), "WriteDecimal", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead(ctx.MapType(typeof(BclHelpers)), "ReadDecimal", ExpectedType); + } +#endif + + } +} +#endif \ No newline at end of file diff --git 
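DateTimeSerializer above switches between protobuf-net's bcl.DateTime encoding and the well-known Timestamp encoding based on DataFormat; a sketch with an illustrative type follows.

using System;
using System.IO;
using ProtoBuf;

[ProtoContract]
class Event
{
    // Default encoding: protobuf-net's bcl.DateTime representation.
    [ProtoMember(1)]
    public DateTime Raised { get; set; }

    // DataFormat.WellKnown takes the wellKnown branch above and writes a
    // google.protobuf.Timestamp-compatible payload instead.
    [ProtoMember(2, DataFormat = DataFormat.WellKnown)]
    public DateTime RaisedUtc { get; set; }
}

static class DateTimeDemo
{
    static void Main()
    {
        var evt = new Event { Raised = DateTime.Now, RaisedUtc = DateTime.UtcNow };
        using (var ms = new MemoryStream())
        {
            Serializer.Serialize(ms, evt);
            ms.Position = 0;
            var back = Serializer.Deserialize<Event>(ms);
            Console.WriteLine(back.RaisedUtc.Kind);   // the Timestamp form is defined in UTC
        }
    }
}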
a/Runtime/Protobuf-net/Serializers/DecimalSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/DecimalSerializer.cs.meta new file mode 100644 index 0000000..f8e097a --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/DecimalSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 80efe6cca6916ab46b430c27dc58369c +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/DefaultValueDecorator.cs b/Runtime/Protobuf-net/Serializers/DefaultValueDecorator.cs new file mode 100644 index 0000000..895d0c4 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/DefaultValueDecorator.cs @@ -0,0 +1,259 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class DefaultValueDecorator : ProtoDecoratorBase + { + public override Type ExpectedType => Tail.ExpectedType; + + public override bool RequiresOldValue => Tail.RequiresOldValue; + + public override bool ReturnsValue => Tail.ReturnsValue; + + private readonly object defaultValue; + public DefaultValueDecorator(TypeModel model, object defaultValue, IProtoSerializer tail) : base(tail) + { + if (defaultValue == null) throw new ArgumentNullException(nameof(defaultValue)); + Type type = model.MapType(defaultValue.GetType()); + if (type != tail.ExpectedType) + { + throw new ArgumentException("Default value is of incorrect type", "defaultValue"); + } + this.defaultValue = defaultValue; + } + + public override void Write(object value, ProtoWriter dest) + { + if (!object.Equals(value, defaultValue)) + { + Tail.Write(value, dest); + } + } + + public override object Read(object value, ProtoReader source) + { + return Tail.Read(value, source); + } + +#if FEAT_COMPILER + protected override void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + Compiler.CodeLabel done = ctx.DefineLabel(); + if (valueFrom == null) + { + ctx.CopyValue(); // on the stack + Compiler.CodeLabel needToPop = ctx.DefineLabel(); + EmitBranchIfDefaultValue(ctx, needToPop); + Tail.EmitWrite(ctx, null); + ctx.Branch(done, true); + ctx.MarkLabel(needToPop); + ctx.DiscardValue(); + } + else + { + ctx.LoadValue(valueFrom); // variable/parameter + EmitBranchIfDefaultValue(ctx, done); + Tail.EmitWrite(ctx, valueFrom); + } + ctx.MarkLabel(done); + } + private void EmitBeq(Compiler.CompilerContext ctx, Compiler.CodeLabel label, Type type) + { + switch (Helpers.GetTypeCode(type)) + { + case ProtoTypeCode.Boolean: + case ProtoTypeCode.Byte: + case ProtoTypeCode.Char: + case ProtoTypeCode.Double: + case ProtoTypeCode.Int16: + case ProtoTypeCode.Int32: + case ProtoTypeCode.Int64: + case ProtoTypeCode.SByte: + case ProtoTypeCode.Single: + case ProtoTypeCode.UInt16: + case ProtoTypeCode.UInt32: + case ProtoTypeCode.UInt64: + ctx.BranchIfEqual(label, false); + break; + default: +#if COREFX + MethodInfo method = type.GetMethod("op_Equality", new Type[] { type, type }); + if (method == null || !method.IsPublic || !method.IsStatic) method = null; +#else + MethodInfo method = type.GetMethod("op_Equality", BindingFlags.Public | BindingFlags.Static, + null, new Type[] { type, type }, null); +#endif + if (method == null || method.ReturnType != ctx.MapType(typeof(bool))) + { + throw new InvalidOperationException("No suitable equality operator found for default-values of type: " + type.FullName); + } + ctx.EmitCall(method); + ctx.BranchIfTrue(label, 
false); + break; + + } + } + private void EmitBranchIfDefaultValue(Compiler.CompilerContext ctx, Compiler.CodeLabel label) + { + Type expected = ExpectedType; + switch (Helpers.GetTypeCode(expected)) + { + case ProtoTypeCode.Boolean: + if ((bool)defaultValue) + { + ctx.BranchIfTrue(label, false); + } + else + { + ctx.BranchIfFalse(label, false); + } + break; + case ProtoTypeCode.Byte: + if ((byte)defaultValue == (byte)0) + { + ctx.BranchIfFalse(label, false); + } + else + { + ctx.LoadValue((int)(byte)defaultValue); + EmitBeq(ctx, label, expected); + } + break; + case ProtoTypeCode.SByte: + if ((sbyte)defaultValue == (sbyte)0) + { + ctx.BranchIfFalse(label, false); + } + else + { + ctx.LoadValue((int)(sbyte)defaultValue); + EmitBeq(ctx, label, expected); + } + break; + case ProtoTypeCode.Int16: + if ((short)defaultValue == (short)0) + { + ctx.BranchIfFalse(label, false); + } + else + { + ctx.LoadValue((int)(short)defaultValue); + EmitBeq(ctx, label, expected); + } + break; + case ProtoTypeCode.UInt16: + if ((ushort)defaultValue == (ushort)0) + { + ctx.BranchIfFalse(label, false); + } + else + { + ctx.LoadValue((int)(ushort)defaultValue); + EmitBeq(ctx, label, expected); + } + break; + case ProtoTypeCode.Int32: + if ((int)defaultValue == (int)0) + { + ctx.BranchIfFalse(label, false); + } + else + { + ctx.LoadValue((int)defaultValue); + EmitBeq(ctx, label, expected); + } + break; + case ProtoTypeCode.UInt32: + if ((uint)defaultValue == (uint)0) + { + ctx.BranchIfFalse(label, false); + } + else + { + ctx.LoadValue((int)(uint)defaultValue); + EmitBeq(ctx, label, expected); + } + break; + case ProtoTypeCode.Char: + if ((char)defaultValue == (char)0) + { + ctx.BranchIfFalse(label, false); + } + else + { + ctx.LoadValue((int)(char)defaultValue); + EmitBeq(ctx, label, expected); + } + break; + case ProtoTypeCode.Int64: + ctx.LoadValue((long)defaultValue); + EmitBeq(ctx, label, expected); + break; + case ProtoTypeCode.UInt64: + ctx.LoadValue((long)(ulong)defaultValue); + EmitBeq(ctx, label, expected); + break; + case ProtoTypeCode.Double: + ctx.LoadValue((double)defaultValue); + EmitBeq(ctx, label, expected); + break; + case ProtoTypeCode.Single: + ctx.LoadValue((float)defaultValue); + EmitBeq(ctx, label, expected); + break; + case ProtoTypeCode.String: + ctx.LoadValue((string)defaultValue); + EmitBeq(ctx, label, expected); + break; + case ProtoTypeCode.Decimal: + { + decimal d = (decimal)defaultValue; + ctx.LoadValue(d); + EmitBeq(ctx, label, expected); + } + break; + case ProtoTypeCode.TimeSpan: + { + TimeSpan ts = (TimeSpan)defaultValue; + if (ts == TimeSpan.Zero) + { + ctx.LoadValue(typeof(TimeSpan).GetField("Zero")); + } + else + { + ctx.LoadValue(ts.Ticks); + ctx.EmitCall(ctx.MapType(typeof(TimeSpan)).GetMethod("FromTicks")); + } + EmitBeq(ctx, label, expected); + break; + } + case ProtoTypeCode.Guid: + { + ctx.LoadValue((Guid)defaultValue); + EmitBeq(ctx, label, expected); + break; + } + case ProtoTypeCode.DateTime: + { + ctx.LoadValue(((DateTime)defaultValue).ToBinary()); + ctx.EmitCall(ctx.MapType(typeof(DateTime)).GetMethod("FromBinary")); + + EmitBeq(ctx, label, expected); + break; + } + default: + throw new NotSupportedException("Type cannot be represented as a default value: " + expected.FullName); + } + } + + protected override void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + Tail.EmitRead(ctx, valueFrom); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/DefaultValueDecorator.cs.meta 
b/Runtime/Protobuf-net/Serializers/DefaultValueDecorator.cs.meta new file mode 100644 index 0000000..7cbd6ed --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/DefaultValueDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: ad3a3e386e17b67488f858d409d3e8a7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/DoubleSerializer.cs b/Runtime/Protobuf-net/Serializers/DoubleSerializer.cs new file mode 100644 index 0000000..8b25523 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/DoubleSerializer.cs @@ -0,0 +1,42 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class DoubleSerializer : IProtoSerializer + { + static readonly Type expectedType = typeof(double); + + public DoubleSerializer(ProtoBuf.Meta.TypeModel model) { } + + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadDouble(); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteDouble((double)value, dest); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteDouble", valueFrom); + } + + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadDouble", ExpectedType); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/DoubleSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/DoubleSerializer.cs.meta new file mode 100644 index 0000000..cdba0a7 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/DoubleSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 65b598b3ebee04946abf8957a0f92762 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/EnumSerializer.cs b/Runtime/Protobuf-net/Serializers/EnumSerializer.cs new file mode 100644 index 0000000..78cb78a --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/EnumSerializer.cs @@ -0,0 +1,267 @@ +#if !NO_RUNTIME +using System; +using ProtoBuf.Meta; +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + sealed class EnumSerializer : IProtoSerializer + { + public readonly struct EnumPair + { + public readonly object RawValue; // note that this is boxing, but I'll live with it + public readonly Enum TypedValue; // note that this is boxing, but I'll live with it + public readonly int WireValue; + public EnumPair(int wireValue, object raw, Type type) + { + WireValue = wireValue; + RawValue = raw; + TypedValue = (Enum)Enum.ToObject(type, raw); + } + } + + private readonly Type enumType; + private readonly EnumPair[] map; + public EnumSerializer(Type enumType, EnumPair[] map) + { + this.enumType = enumType ?? 
throw new ArgumentNullException(nameof(enumType)); + this.map = map; + if (map != null) + { + for (int i = 1; i < map.Length; i++) + for (int j = 0; j < i; j++) + { + if (map[i].WireValue == map[j].WireValue && !Equals(map[i].RawValue, map[j].RawValue)) + { + throw new ProtoException("Multiple enums with wire-value " + map[i].WireValue.ToString()); + } + if (Equals(map[i].RawValue, map[j].RawValue) && map[i].WireValue != map[j].WireValue) + { + throw new ProtoException("Multiple enums with deserialized-value " + map[i].RawValue); + } + } + + } + } + + private ProtoTypeCode GetTypeCode() + { + Type type = Helpers.GetUnderlyingType(enumType); + if (type == null) type = enumType; + return Helpers.GetTypeCode(type); + } + + public Type ExpectedType => enumType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + private int EnumToWire(object value) + { + unchecked + { + switch (GetTypeCode()) + { // unbox then convert to int + case ProtoTypeCode.Byte: return (int)(byte)value; + case ProtoTypeCode.SByte: return (int)(sbyte)value; + case ProtoTypeCode.Int16: return (int)(short)value; + case ProtoTypeCode.Int32: return (int)value; + case ProtoTypeCode.Int64: return (int)(long)value; + case ProtoTypeCode.UInt16: return (int)(ushort)value; + case ProtoTypeCode.UInt32: return (int)(uint)value; + case ProtoTypeCode.UInt64: return (int)(ulong)value; + default: throw new InvalidOperationException(); + } + } + } + + private object WireToEnum(int value) + { + unchecked + { + switch (GetTypeCode()) + { // convert from int then box + case ProtoTypeCode.Byte: return Enum.ToObject(enumType, (byte)value); + case ProtoTypeCode.SByte: return Enum.ToObject(enumType, (sbyte)value); + case ProtoTypeCode.Int16: return Enum.ToObject(enumType, (short)value); + case ProtoTypeCode.Int32: return Enum.ToObject(enumType, value); + case ProtoTypeCode.Int64: return Enum.ToObject(enumType, (long)value); + case ProtoTypeCode.UInt16: return Enum.ToObject(enumType, (ushort)value); + case ProtoTypeCode.UInt32: return Enum.ToObject(enumType, (uint)value); + case ProtoTypeCode.UInt64: return Enum.ToObject(enumType, (ulong)value); + default: throw new InvalidOperationException(); + } + } + } + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + int wireValue = source.ReadInt32(); + if (map == null) + { + return WireToEnum(wireValue); + } + for (int i = 0; i < map.Length; i++) + { + if (map[i].WireValue == wireValue) + { + return map[i].TypedValue; + } + } + source.ThrowEnumException(ExpectedType, wireValue); + return null; // to make compiler happy + } + + public void Write(object value, ProtoWriter dest) + { + if (map == null) + { + ProtoWriter.WriteInt32(EnumToWire(value), dest); + } + else + { + for (int i = 0; i < map.Length; i++) + { + if (object.Equals(map[i].TypedValue, value)) + { + ProtoWriter.WriteInt32(map[i].WireValue, dest); + return; + } + } + ProtoWriter.ThrowEnumException(dest, value); + } + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ProtoTypeCode typeCode = GetTypeCode(); + if (map == null) + { + ctx.LoadValue(valueFrom); + ctx.ConvertToInt32(typeCode, false); + ctx.EmitBasicWrite("WriteInt32", null); + } + else + { + using (Compiler.Local loc = ctx.GetLocalWithValue(ExpectedType, valueFrom)) + { + Compiler.CodeLabel @continue = ctx.DefineLabel(); + for (int i = 0; i < map.Length; i++) + { + Compiler.CodeLabel tryNextValue = 
ctx.DefineLabel(), processThisValue = ctx.DefineLabel(); + ctx.LoadValue(loc); + WriteEnumValue(ctx, typeCode, map[i].RawValue); + ctx.BranchIfEqual(processThisValue, true); + ctx.Branch(tryNextValue, true); + ctx.MarkLabel(processThisValue); + ctx.LoadValue(map[i].WireValue); + ctx.EmitBasicWrite("WriteInt32", null); + ctx.Branch(@continue, false); + ctx.MarkLabel(tryNextValue); + } + ctx.LoadReaderWriter(); + ctx.LoadValue(loc); + ctx.CastToObject(ExpectedType); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("ThrowEnumException")); + ctx.MarkLabel(@continue); + } + } + } + + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ProtoTypeCode typeCode = GetTypeCode(); + if (map == null) + { + ctx.EmitBasicRead("ReadInt32", ctx.MapType(typeof(int))); + ctx.ConvertFromInt32(typeCode, false); + } + else + { + int[] wireValues = new int[map.Length]; + object[] values = new object[map.Length]; + for (int i = 0; i < map.Length; i++) + { + wireValues[i] = map[i].WireValue; + values[i] = map[i].RawValue; + } + using (Compiler.Local result = new Compiler.Local(ctx, ExpectedType)) + using (Compiler.Local wireValue = new Compiler.Local(ctx, ctx.MapType(typeof(int)))) + { + ctx.EmitBasicRead("ReadInt32", ctx.MapType(typeof(int))); + ctx.StoreValue(wireValue); + Compiler.CodeLabel @continue = ctx.DefineLabel(); + foreach (BasicList.Group group in BasicList.GetContiguousGroups(wireValues, values)) + { + Compiler.CodeLabel tryNextGroup = ctx.DefineLabel(); + int groupItemCount = group.Items.Count; + if (groupItemCount == 1) + { + // discreet group; use an equality test + ctx.LoadValue(wireValue); + ctx.LoadValue(group.First); + Compiler.CodeLabel processThisValue = ctx.DefineLabel(); + ctx.BranchIfEqual(processThisValue, true); + ctx.Branch(tryNextGroup, false); + WriteEnumValue(ctx, typeCode, processThisValue, @continue, group.Items[0], @result); + } + else + { + // implement as a jump-table-based switch + ctx.LoadValue(wireValue); + ctx.LoadValue(group.First); + ctx.Subtract(); // jump-tables are zero-based + Compiler.CodeLabel[] jmp = new Compiler.CodeLabel[groupItemCount]; + for (int i = 0; i < groupItemCount; i++) + { + jmp[i] = ctx.DefineLabel(); + } + ctx.Switch(jmp); + // write the default... 
+ ctx.Branch(tryNextGroup, false); + for (int i = 0; i < groupItemCount; i++) + { + WriteEnumValue(ctx, typeCode, jmp[i], @continue, group.Items[i], @result); + } + } + ctx.MarkLabel(tryNextGroup); + } + // throw source.CreateEnumException(ExpectedType, wireValue); + ctx.LoadReaderWriter(); + ctx.LoadValue(ExpectedType); + ctx.LoadValue(wireValue); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("ThrowEnumException")); + ctx.MarkLabel(@continue); + ctx.LoadValue(result); + } + } + } + private static void WriteEnumValue(Compiler.CompilerContext ctx, ProtoTypeCode typeCode, object value) + { + switch (typeCode) + { + case ProtoTypeCode.Byte: ctx.LoadValue((int)(byte)value); break; + case ProtoTypeCode.SByte: ctx.LoadValue((int)(sbyte)value); break; + case ProtoTypeCode.Int16: ctx.LoadValue((int)(short)value); break; + case ProtoTypeCode.Int32: ctx.LoadValue((int)(int)value); break; + case ProtoTypeCode.Int64: ctx.LoadValue((long)(long)value); break; + case ProtoTypeCode.UInt16: ctx.LoadValue((int)(ushort)value); break; + case ProtoTypeCode.UInt32: ctx.LoadValue((int)(uint)value); break; + case ProtoTypeCode.UInt64: ctx.LoadValue((long)(ulong)value); break; + default: throw new InvalidOperationException(); + } + } + private static void WriteEnumValue(Compiler.CompilerContext ctx, ProtoTypeCode typeCode, Compiler.CodeLabel handler, Compiler.CodeLabel @continue, object value, Compiler.Local local) + { + ctx.MarkLabel(handler); + WriteEnumValue(ctx, typeCode, value); + ctx.StoreValue(local); + ctx.Branch(@continue, false); // "continue" + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/EnumSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/EnumSerializer.cs.meta new file mode 100644 index 0000000..b58d866 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/EnumSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: ef6c6d630a8f5ca449eec10513147563 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/FieldDecorator.cs b/Runtime/Protobuf-net/Serializers/FieldDecorator.cs new file mode 100644 index 0000000..26c0452 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/FieldDecorator.cs @@ -0,0 +1,104 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + sealed class FieldDecorator : ProtoDecoratorBase + { + public override Type ExpectedType => forType; + private readonly FieldInfo field; + private readonly Type forType; + public override bool RequiresOldValue => true; + public override bool ReturnsValue => false; + public FieldDecorator(Type forType, FieldInfo field, IProtoSerializer tail) : base(tail) + { + Helpers.DebugAssert(forType != null); + Helpers.DebugAssert(field != null); + this.forType = forType; + this.field = field; + } + + public override void Write(object value, ProtoWriter dest) + { + Helpers.DebugAssert(value != null); + value = field.GetValue(value); + if (value != null) Tail.Write(value, dest); + } + + public override object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value != null); + object newValue = Tail.Read((Tail.RequiresOldValue ? 
field.GetValue(value) : null), source); + if (newValue != null) field.SetValue(value, newValue); + return null; + } + + +#if FEAT_COMPILER + protected override void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.LoadAddress(valueFrom, ExpectedType); + ctx.LoadValue(field); + ctx.WriteNullCheckedTail(field.FieldType, Tail, null); + } + protected override void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + using (Compiler.Local loc = ctx.GetLocalWithValue(ExpectedType, valueFrom)) + { + if (Tail.RequiresOldValue) + { + ctx.LoadAddress(loc, ExpectedType); + ctx.LoadValue(field); + } + // value is either now on the stack or not needed + ctx.ReadNullCheckedTail(field.FieldType, Tail, null); + + // the field could be a backing field that needs to be raised back to + // the property if we're doing a full compile + MemberInfo member = field; + ctx.CheckAccessibility(ref member); + bool writeValue = member is FieldInfo; + + if (writeValue) + { + if (Tail.ReturnsValue) + { + using (Compiler.Local newVal = new Compiler.Local(ctx, field.FieldType)) + { + ctx.StoreValue(newVal); + if (Helpers.IsValueType(field.FieldType)) + { + ctx.LoadAddress(loc, ExpectedType); + ctx.LoadValue(newVal); + ctx.StoreValue(field); + } + else + { + Compiler.CodeLabel allDone = ctx.DefineLabel(); + ctx.LoadValue(newVal); + ctx.BranchIfFalse(allDone, true); // interpret null as "don't assign" + + ctx.LoadAddress(loc, ExpectedType); + ctx.LoadValue(newVal); + ctx.StoreValue(field); + + ctx.MarkLabel(allDone); + } + } + } + } + else + { + // can't use result + if (Tail.ReturnsValue) + { + ctx.DiscardValue(); + } + } + } + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/FieldDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/FieldDecorator.cs.meta new file mode 100644 index 0000000..63065a7 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/FieldDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: f7c1c3141cd2fad47b3112747b44314a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/GuidSerializer.cs b/Runtime/Protobuf-net/Serializers/GuidSerializer.cs new file mode 100644 index 0000000..27556d5 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/GuidSerializer.cs @@ -0,0 +1,43 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class GuidSerializer : IProtoSerializer + { + static readonly Type expectedType = typeof(Guid); + + public GuidSerializer(ProtoBuf.Meta.TypeModel model) { } + + public Type ExpectedType { get { return expectedType; } } + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public void Write(object value, ProtoWriter dest) + { + BclHelpers.WriteGuid((Guid)value, dest); + } + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return BclHelpers.ReadGuid(source); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitWrite(ctx.MapType(typeof(BclHelpers)), "WriteGuid", valueFrom); + } + + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead(ctx.MapType(typeof(BclHelpers)), "ReadGuid", ExpectedType); + } +#endif + + } +} +#endif \ No newline at end 
of file diff --git a/Runtime/Protobuf-net/Serializers/GuidSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/GuidSerializer.cs.meta new file mode 100644 index 0000000..7eeb096 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/GuidSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 362fe2dd035b0cb4eaff2c7b7337fc66 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/IProtoSerializer.cs b/Runtime/Protobuf-net/Serializers/IProtoSerializer.cs new file mode 100644 index 0000000..59e8cc2 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/IProtoSerializer.cs @@ -0,0 +1,64 @@ +#if !NO_RUNTIME +using System; + + +namespace ProtoBuf.Serializers +{ + interface IProtoSerializer + { + /// + /// The type that this serializer is intended to work for. + /// + Type ExpectedType { get; } + + /// + /// Perform the steps necessary to serialize this data. + /// + /// The value to be serialized. + /// The writer entity that is accumulating the output data. + void Write(object value, ProtoWriter dest); + + /// + /// Perform the steps necessary to deserialize this data. + /// + /// The current value, if appropriate. + /// The reader providing the input data. + /// The updated / replacement value. + object Read(object value, ProtoReader source); + + /// + /// Indicates whether a Read operation replaces the existing value, or + /// extends the value. If false, the "value" parameter to Read is + /// discarded, and should be passed in as null. + /// + bool RequiresOldValue { get; } + /// + /// Now all Read operations return a value (although most do); if false no + /// value should be expected. + /// + bool ReturnsValue { get; } + +#if FEAT_COMPILER + /// Emit the IL necessary to perform the given actions + /// to serialize this data. + /// + /// Details and utilities for the method being generated. + /// The source of the data to work against; + /// If the value is only needed once, then LoadValue is sufficient. If + /// the value is needed multiple times, then note that a "null" + /// means "the top of the stack", in which case you should create your + /// own copy - GetLocalWithValue. + void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom); + + /// + /// Emit the IL necessary to perform the given actions to deserialize this data. + /// + /// Details and utilities for the method being generated. + /// For nested values, the instance holding the values; note + /// that this is not always provided - a null means not supplied. Since this is always + /// a variable or argument, it is not necessary to consume this value. 
+ void EmitRead(Compiler.CompilerContext ctx, Compiler.Local entity); +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/IProtoSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/IProtoSerializer.cs.meta new file mode 100644 index 0000000..40d402d --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/IProtoSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 6acc35442de99c94aade5d43c7992338 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/IProtoTypeSerializer.cs b/Runtime/Protobuf-net/Serializers/IProtoTypeSerializer.cs new file mode 100644 index 0000000..da1439b --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/IProtoTypeSerializer.cs @@ -0,0 +1,20 @@ +#if !NO_RUNTIME +using ProtoBuf.Meta; +namespace ProtoBuf.Serializers +{ + interface IProtoTypeSerializer : IProtoSerializer + { + bool HasCallbacks(TypeModel.CallbackType callbackType); + bool CanCreateInstance(); + object CreateInstance(ProtoReader source); + void Callback(object value, TypeModel.CallbackType callbackType, SerializationContext context); + +#if FEAT_COMPILER + void EmitCallback(Compiler.CompilerContext ctx, Compiler.Local valueFrom, TypeModel.CallbackType callbackType); +#endif +#if FEAT_COMPILER + void EmitCreateInstance(Compiler.CompilerContext ctx); +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/IProtoTypeSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/IProtoTypeSerializer.cs.meta new file mode 100644 index 0000000..d4c96cf --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/IProtoTypeSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 6974491708512ec41b7a5f29805e4c69 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/ISerializerProxy.cs b/Runtime/Protobuf-net/Serializers/ISerializerProxy.cs new file mode 100644 index 0000000..3ab2cb8 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ISerializerProxy.cs @@ -0,0 +1,10 @@ +#if !NO_RUNTIME + +namespace ProtoBuf.Serializers +{ + interface ISerializerProxy + { + IProtoSerializer Serializer { get; } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/ISerializerProxy.cs.meta b/Runtime/Protobuf-net/Serializers/ISerializerProxy.cs.meta new file mode 100644 index 0000000..aa3cdfa --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ISerializerProxy.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: f717dd1190cbe174587e3bff7dd3dd76 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/ImmutableCollectionDecorator.cs b/Runtime/Protobuf-net/Serializers/ImmutableCollectionDecorator.cs new file mode 100644 index 0000000..918d1fd --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ImmutableCollectionDecorator.cs @@ -0,0 +1,304 @@ +#if !NO_RUNTIME +using System; +using System.Collections; +using System.Reflection; +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class ImmutableCollectionDecorator : ListDecorator + { + protected override bool RequireAdd { get { return false; } } 
+ + static Type ResolveIReadOnlyCollection(Type declaredType, Type t) + { +#if COREFX || PROFILE259 + if (CheckIsIReadOnlyCollectionExactly(declaredType.GetTypeInfo())) return declaredType; + foreach (Type intImplBasic in declaredType.GetTypeInfo().ImplementedInterfaces) + { + TypeInfo intImpl = intImplBasic.GetTypeInfo(); + if (CheckIsIReadOnlyCollectionExactly(intImpl)) return intImplBasic; + } +#else + if (CheckIsIReadOnlyCollectionExactly(declaredType)) return declaredType; + foreach (Type intImpl in declaredType.GetInterfaces()) + { + if (CheckIsIReadOnlyCollectionExactly(intImpl)) return intImpl; + } +#endif + return null; + } + +#if WINRT || COREFX || PROFILE259 + static bool CheckIsIReadOnlyCollectionExactly(TypeInfo t) +#else + static bool CheckIsIReadOnlyCollectionExactly(Type t) +#endif + { + if (t != null && t.IsGenericType && t.Name.StartsWith("IReadOnlyCollection`")) + { +#if WINRT || COREFX || PROFILE259 + Type[] typeArgs = t.GenericTypeArguments; + if (typeArgs.Length != 1 && typeArgs[0].GetTypeInfo().Equals(t)) return false; +#else + Type[] typeArgs = t.GetGenericArguments(); + if (typeArgs.Length != 1 && typeArgs[0] != t) return false; +#endif + + return true; + } + return false; + } + + internal static bool IdentifyImmutable(TypeModel model, Type declaredType, out MethodInfo builderFactory, out PropertyInfo isEmpty, out PropertyInfo length, out MethodInfo add, out MethodInfo addRange, out MethodInfo finish) + { + builderFactory = add = addRange = finish = null; + isEmpty = length = null; + if (model == null || declaredType == null) return false; +#if COREFX || PROFILE259 + TypeInfo declaredTypeInfo = declaredType.GetTypeInfo(); +#else + Type declaredTypeInfo = declaredType; +#endif + + // try to detect immutable collections; firstly, they are all generic, and all implement IReadOnlyCollection for some T + if (!declaredTypeInfo.IsGenericType) return false; + +#if COREFX || PROFILE259 + Type[] typeArgs = declaredTypeInfo.GenericTypeArguments, effectiveType; +#else + Type[] typeArgs = declaredTypeInfo.GetGenericArguments(), effectiveType; +#endif + switch (typeArgs.Length) + { + case 1: + effectiveType = typeArgs; + break; // fine + case 2: + Type kvp = model.MapType(typeof(System.Collections.Generic.KeyValuePair<,>)); + if (kvp == null) return false; + kvp = kvp.MakeGenericType(typeArgs); + effectiveType = new Type[] { kvp }; + break; + default: + return false; // no clue! + } + + if (ResolveIReadOnlyCollection(declaredType, null) == null) return false; // no IReadOnlyCollection found + + // and we want to use the builder API, so for generic Foo or IFoo we want to use Foo.CreateBuilder + string name = declaredType.Name; + int i = name.IndexOf('`'); + if (i <= 0) return false; + name = declaredTypeInfo.IsInterface ? name.Substring(1, i - 1) : name.Substring(0, i); + + Type outerType = model.GetType(declaredType.Namespace + "." + name, declaredTypeInfo.Assembly); + // I hate special-cases... 
+ if (outerType == null && name == "ImmutableSet") + { + outerType = model.GetType(declaredType.Namespace + ".ImmutableHashSet", declaredTypeInfo.Assembly); + } + if (outerType == null) return false; + +#if PROFILE259 + foreach (MethodInfo method in outerType.GetTypeInfo().DeclaredMethods) +#else + foreach (MethodInfo method in outerType.GetMethods()) +#endif + { + if (!method.IsStatic || method.Name != "CreateBuilder" || !method.IsGenericMethodDefinition || method.GetParameters().Length != 0 + || method.GetGenericArguments().Length != typeArgs.Length) continue; + + builderFactory = method.MakeGenericMethod(typeArgs); + break; + } + Type voidType = model.MapType(typeof(void)); + if (builderFactory == null || builderFactory.ReturnType == null || builderFactory.ReturnType == voidType) return false; + +#if COREFX + TypeInfo typeInfo = declaredType.GetTypeInfo(); +#else + Type typeInfo = declaredType; +#endif + isEmpty = Helpers.GetProperty(typeInfo, "IsDefaultOrEmpty", false); //struct based immutabletypes can have both a "default" and "empty" state + if (isEmpty == null) isEmpty = Helpers.GetProperty(typeInfo, "IsEmpty", false); + if (isEmpty == null) + { + //Fallback to checking length if a "IsEmpty" property is not found + length = Helpers.GetProperty(typeInfo, "Length", false); + if (length == null) length = Helpers.GetProperty(typeInfo, "Count", false); + + if (length == null) length = Helpers.GetProperty(ResolveIReadOnlyCollection(declaredType, effectiveType[0]), "Count", false); + + if (length == null) return false; + } + + add = Helpers.GetInstanceMethod(builderFactory.ReturnType, "Add", effectiveType); + if (add == null) return false; + + finish = Helpers.GetInstanceMethod(builderFactory.ReturnType, "ToImmutable", Helpers.EmptyTypes); + if (finish == null || finish.ReturnType == null || finish.ReturnType == voidType) return false; + + if (!(finish.ReturnType == declaredType || Helpers.IsAssignableFrom(declaredType, finish.ReturnType))) return false; + + addRange = Helpers.GetInstanceMethod(builderFactory.ReturnType, "AddRange", new Type[] { declaredType }); + if (addRange == null) + { + Type enumerable = model.MapType(typeof(System.Collections.Generic.IEnumerable<>), false); + if (enumerable != null) + { + addRange = Helpers.GetInstanceMethod(builderFactory.ReturnType, "AddRange", new Type[] { enumerable.MakeGenericType(effectiveType) }); + } + } + + return true; + } + + private readonly MethodInfo builderFactory, add, addRange, finish; + private readonly PropertyInfo isEmpty, length; + internal ImmutableCollectionDecorator(TypeModel model, Type declaredType, Type concreteType, IProtoSerializer tail, int fieldNumber, bool writePacked, WireType packedWireType, bool returnList, bool overwriteList, bool supportNull, + MethodInfo builderFactory, PropertyInfo isEmpty, PropertyInfo length, MethodInfo add, MethodInfo addRange, MethodInfo finish) + : base(model, declaredType, concreteType, tail, fieldNumber, writePacked, packedWireType, returnList, overwriteList, supportNull) + { + this.builderFactory = builderFactory; + this.isEmpty = isEmpty; + this.length = length; + this.add = add; + this.addRange = addRange; + this.finish = finish; + } + + public override object Read(object value, ProtoReader source) + { + object builderInstance = builderFactory.Invoke(null, null); + int field = source.FieldNumber; + object[] args = new object[1]; + if (AppendToCollection && value != null && (isEmpty != null ? 
!(bool)isEmpty.GetValue(value, null) : (int)length.GetValue(value, null) != 0)) + { + if (addRange != null) + { + args[0] = value; + addRange.Invoke(builderInstance, args); + } + else + { + foreach (object item in (ICollection)value) + { + args[0] = item; + add.Invoke(builderInstance, args); + } + } + } + + if (packedWireType != WireType.None && source.WireType == WireType.String) + { + SubItemToken token = ProtoReader.StartSubItem(source); + while (ProtoReader.HasSubValue(packedWireType, source)) + { + args[0] = Tail.Read(null, source); + add.Invoke(builderInstance, args); + } + ProtoReader.EndSubItem(token, source); + } + else + { + do + { + args[0] = Tail.Read(null, source); + add.Invoke(builderInstance, args); + } while (source.TryReadFieldHeader(field)); + } + + return finish.Invoke(builderInstance, null); + } + +#if FEAT_COMPILER + protected override void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + using (Compiler.Local oldList = AppendToCollection ? ctx.GetLocalWithValue(ExpectedType, valueFrom) : null) + using (Compiler.Local builder = new Compiler.Local(ctx, builderFactory.ReturnType)) + { + ctx.EmitCall(builderFactory); + ctx.StoreValue(builder); + + if (AppendToCollection) + { + Compiler.CodeLabel done = ctx.DefineLabel(); + if (!Helpers.IsValueType(ExpectedType)) + { + ctx.LoadValue(oldList); + ctx.BranchIfFalse(done, false); // old value null; nothing to add + } + + ctx.LoadAddress(oldList, oldList.Type); + if (isEmpty != null) + { + ctx.EmitCall(Helpers.GetGetMethod(isEmpty, false, false)); + ctx.BranchIfTrue(done, false); // old list is empty; nothing to add + } + else + { + ctx.EmitCall(Helpers.GetGetMethod(length, false, false)); + ctx.BranchIfFalse(done, false); // old list is empty; nothing to add + } + + Type voidType = ctx.MapType(typeof(void)); + if (addRange != null) + { + ctx.LoadValue(builder); + ctx.LoadValue(oldList); + ctx.EmitCall(addRange); + if (addRange.ReturnType != null && add.ReturnType != voidType) ctx.DiscardValue(); + } + else + { + // loop and call Add repeatedly + MethodInfo moveNext, current, getEnumerator = GetEnumeratorInfo(ctx.Model, out moveNext, out current); + Helpers.DebugAssert(moveNext != null); + Helpers.DebugAssert(current != null); + Helpers.DebugAssert(getEnumerator != null); + + Type enumeratorType = getEnumerator.ReturnType; + using (Compiler.Local iter = new Compiler.Local(ctx, enumeratorType)) + { + ctx.LoadAddress(oldList, ExpectedType); + ctx.EmitCall(getEnumerator); + ctx.StoreValue(iter); + using (ctx.Using(iter)) + { + Compiler.CodeLabel body = ctx.DefineLabel(), next = ctx.DefineLabel(); + ctx.Branch(next, false); + + ctx.MarkLabel(body); + ctx.LoadAddress(builder, builder.Type); + ctx.LoadAddress(iter, enumeratorType); + ctx.EmitCall(current); + ctx.EmitCall(add); + if (add.ReturnType != null && add.ReturnType != voidType) ctx.DiscardValue(); + + ctx.MarkLabel(@next); + ctx.LoadAddress(iter, enumeratorType); + ctx.EmitCall(moveNext); + ctx.BranchIfTrue(body, false); + } + } + } + + + ctx.MarkLabel(done); + } + + EmitReadList(ctx, builder, Tail, add, packedWireType, false); + + ctx.LoadAddress(builder, builder.Type); + ctx.EmitCall(finish); + if (ExpectedType != finish.ReturnType) + { + ctx.Cast(ExpectedType); + } + } + } +#endif + } +} +#endif diff --git a/Runtime/Protobuf-net/Serializers/ImmutableCollectionDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/ImmutableCollectionDecorator.cs.meta new file mode 100644 index 0000000..f8d9012 --- /dev/null +++ 
b/Runtime/Protobuf-net/Serializers/ImmutableCollectionDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 00a3af58286d1674ca64bcf5fd9f0228 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/Int16Serializer.cs b/Runtime/Protobuf-net/Serializers/Int16Serializer.cs new file mode 100644 index 0000000..eac4eb4 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/Int16Serializer.cs @@ -0,0 +1,42 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class Int16Serializer : IProtoSerializer + { + static readonly Type expectedType = typeof(short); + + public Int16Serializer(ProtoBuf.Meta.TypeModel model) { } + + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadInt16(); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteInt16((short)value, dest); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteInt16", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadInt16", ExpectedType); + } +#endif + + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/Int16Serializer.cs.meta b/Runtime/Protobuf-net/Serializers/Int16Serializer.cs.meta new file mode 100644 index 0000000..546159f --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/Int16Serializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: e45229312d2a4fe45b519e513326b708 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/Int32Serializer.cs b/Runtime/Protobuf-net/Serializers/Int32Serializer.cs new file mode 100644 index 0000000..204880e --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/Int32Serializer.cs @@ -0,0 +1,42 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class Int32Serializer : IProtoSerializer + { + static readonly Type expectedType = typeof(int); + + public Int32Serializer(ProtoBuf.Meta.TypeModel model) { } + + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadInt32(); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteInt32((int)value, dest); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteInt32", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadInt32", ExpectedType); + } +#endif + + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/Int32Serializer.cs.meta b/Runtime/Protobuf-net/Serializers/Int32Serializer.cs.meta new file mode 100644 index 0000000..1be4c7e --- /dev/null +++ 
b/Runtime/Protobuf-net/Serializers/Int32Serializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 4a7c49bc45156f442bfe84fc7eef04b9 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/Int64Serializer.cs b/Runtime/Protobuf-net/Serializers/Int64Serializer.cs new file mode 100644 index 0000000..2791a1e --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/Int64Serializer.cs @@ -0,0 +1,41 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class Int64Serializer : IProtoSerializer + { + static readonly Type expectedType = typeof(long); + + public Int64Serializer(ProtoBuf.Meta.TypeModel model) { } + + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadInt64(); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteInt64((long)value, dest); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteInt64", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadInt64", ExpectedType); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/Int64Serializer.cs.meta b/Runtime/Protobuf-net/Serializers/Int64Serializer.cs.meta new file mode 100644 index 0000000..8dbba5e --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/Int64Serializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 03f2770a306f45046b6e8eab757c9188 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/ListDecorator.cs b/Runtime/Protobuf-net/Serializers/ListDecorator.cs new file mode 100644 index 0000000..82bb128 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ListDecorator.cs @@ -0,0 +1,579 @@ +#if !NO_RUNTIME +using System; +using System.Collections; +using ProtoBuf.Meta; +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + class ListDecorator : ProtoDecoratorBase + { + internal static bool CanPack(WireType wireType) + { + switch (wireType) + { + case WireType.Fixed32: + case WireType.Fixed64: + case WireType.SignedVariant: + case WireType.Variant: + return true; + default: + return false; + } + } + + private readonly byte options; + + private const byte OPTIONS_IsList = 1, + OPTIONS_SuppressIList = 2, + OPTIONS_WritePacked = 4, + OPTIONS_ReturnList = 8, + OPTIONS_OverwriteList = 16, + OPTIONS_SupportNull = 32; + + private readonly Type declaredType, concreteType; + + private readonly MethodInfo add; + + private readonly int fieldNumber; + + private bool IsList { get { return (options & OPTIONS_IsList) != 0; } } + private bool SuppressIList { get { return (options & OPTIONS_SuppressIList) != 0; } } + private bool WritePacked { get { return (options & OPTIONS_WritePacked) != 0; } } + private bool SupportNull { get { return (options & OPTIONS_SupportNull) != 0; } } + private bool ReturnList { get { return (options & OPTIONS_ReturnList) != 0; } } + protected readonly WireType 
packedWireType; + + internal static ListDecorator Create(TypeModel model, Type declaredType, Type concreteType, IProtoSerializer tail, int fieldNumber, bool writePacked, WireType packedWireType, bool returnList, bool overwriteList, bool supportNull) + { + if (returnList && ImmutableCollectionDecorator.IdentifyImmutable(model, declaredType, + out MethodInfo builderFactory, + out PropertyInfo isEmpty, + out PropertyInfo length, + out MethodInfo add, + out MethodInfo addRange, + out MethodInfo finish)) + { + return new ImmutableCollectionDecorator( + model, declaredType, concreteType, tail, fieldNumber, writePacked, packedWireType, returnList, overwriteList, supportNull, + builderFactory, isEmpty, length, add, addRange, finish); + } + + return new ListDecorator(model, declaredType, concreteType, tail, fieldNumber, writePacked, packedWireType, returnList, overwriteList, supportNull); + } + + protected ListDecorator(TypeModel model, Type declaredType, Type concreteType, IProtoSerializer tail, int fieldNumber, bool writePacked, WireType packedWireType, bool returnList, bool overwriteList, bool supportNull) + : base(tail) + { + if (returnList) options |= OPTIONS_ReturnList; + if (overwriteList) options |= OPTIONS_OverwriteList; + if (supportNull) options |= OPTIONS_SupportNull; + if ((writePacked || packedWireType != WireType.None) && fieldNumber <= 0) throw new ArgumentOutOfRangeException("fieldNumber"); + if (!CanPack(packedWireType)) + { + if (writePacked) throw new InvalidOperationException("Only simple data-types can use packed encoding"); + packedWireType = WireType.None; + } + + this.fieldNumber = fieldNumber; + if (writePacked) options |= OPTIONS_WritePacked; + this.packedWireType = packedWireType; + if (declaredType == null) throw new ArgumentNullException("declaredType"); + if (declaredType.IsArray) throw new ArgumentException("Cannot treat arrays as lists", "declaredType"); + this.declaredType = declaredType; + this.concreteType = concreteType; + + // look for a public list.Add(typedObject) method + if (RequireAdd) + { + bool isList; + add = TypeModel.ResolveListAdd(model, declaredType, tail.ExpectedType, out isList); + if (isList) + { + options |= OPTIONS_IsList; + string fullName = declaredType.FullName; + if (fullName != null && fullName.StartsWith("System.Data.Linq.EntitySet`1[[")) + { // see http://stackoverflow.com/questions/6194639/entityset-is-there-a-sane-reason-that-ilist-add-doesnt-set-assigned + options |= OPTIONS_SuppressIList; + } + } + if (add == null) throw new InvalidOperationException("Unable to resolve a suitable Add method for " + declaredType.FullName); + } + + } + protected virtual bool RequireAdd => true; + + public override Type ExpectedType => declaredType; + + public override bool RequiresOldValue => AppendToCollection; + + public override bool ReturnsValue => ReturnList; + + protected bool AppendToCollection + { + get { return (options & OPTIONS_OverwriteList) == 0; } + } + +#if FEAT_COMPILER + protected override void EmitRead(ProtoBuf.Compiler.CompilerContext ctx, ProtoBuf.Compiler.Local valueFrom) + { + /* This looks more complex than it is. Look at the non-compiled Read to + * see what it is trying to do, but note that it needs to cope with a + * few more scenarios. Note that it picks the **most specific** Add, + * unlike the runtime version that uses IList when possible. 
The core + * is just a "do {list.Add(readValue())} while {thereIsMore}" + * + * The complexity is due to: + * - value types vs reference types (boxing etc) + * - initialization if we need to pass in a value to the tail + * - handling whether or not the tail *returns* the value vs updates the input + */ + bool returnList = ReturnList; + + using (Compiler.Local list = AppendToCollection ? ctx.GetLocalWithValue(ExpectedType, valueFrom) : new Compiler.Local(ctx, declaredType)) + using (Compiler.Local origlist = (returnList && AppendToCollection && !Helpers.IsValueType(ExpectedType)) ? new Compiler.Local(ctx, ExpectedType) : null) + { + if (!AppendToCollection) + { // always new + ctx.LoadNullRef(); + ctx.StoreValue(list); + } + else if (returnList && origlist != null) + { // need a copy + ctx.LoadValue(list); + ctx.StoreValue(origlist); + } + if (concreteType != null) + { + ctx.LoadValue(list); + Compiler.CodeLabel notNull = ctx.DefineLabel(); + ctx.BranchIfTrue(notNull, true); + ctx.EmitCtor(concreteType); + ctx.StoreValue(list); + ctx.MarkLabel(notNull); + } + + bool castListForAdd = !add.DeclaringType.IsAssignableFrom(declaredType); + EmitReadList(ctx, list, Tail, add, packedWireType, castListForAdd); + + if (returnList) + { + if (AppendToCollection && origlist != null) + { + // remember ^^^^ we had a spare copy of the list on the stack; now we'll compare + ctx.LoadValue(origlist); + ctx.LoadValue(list); // [orig] [new-value] + Compiler.CodeLabel sameList = ctx.DefineLabel(), allDone = ctx.DefineLabel(); + ctx.BranchIfEqual(sameList, true); + ctx.LoadValue(list); + ctx.Branch(allDone, true); + ctx.MarkLabel(sameList); + ctx.LoadNullRef(); + ctx.MarkLabel(allDone); + } + else + { + ctx.LoadValue(list); + } + } + } + } + + internal static void EmitReadList(ProtoBuf.Compiler.CompilerContext ctx, Compiler.Local list, IProtoSerializer tail, MethodInfo add, WireType packedWireType, bool castListForAdd) + { + using (Compiler.Local fieldNumber = new Compiler.Local(ctx, ctx.MapType(typeof(int)))) + { + Compiler.CodeLabel readPacked = packedWireType == WireType.None ? 
new Compiler.CodeLabel() : ctx.DefineLabel(); + if (packedWireType != WireType.None) + { + ctx.LoadReaderWriter(); + ctx.LoadValue(typeof(ProtoReader).GetProperty("WireType")); + ctx.LoadValue((int)WireType.String); + ctx.BranchIfEqual(readPacked, false); + } + ctx.LoadReaderWriter(); + ctx.LoadValue(typeof(ProtoReader).GetProperty("FieldNumber")); + ctx.StoreValue(fieldNumber); + + Compiler.CodeLabel @continue = ctx.DefineLabel(); + ctx.MarkLabel(@continue); + + EmitReadAndAddItem(ctx, list, tail, add, castListForAdd); + + ctx.LoadReaderWriter(); + ctx.LoadValue(fieldNumber); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("TryReadFieldHeader")); + ctx.BranchIfTrue(@continue, false); + + if (packedWireType != WireType.None) + { + Compiler.CodeLabel allDone = ctx.DefineLabel(); + ctx.Branch(allDone, false); + ctx.MarkLabel(readPacked); + + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("StartSubItem")); + + Compiler.CodeLabel testForData = ctx.DefineLabel(), noMoreData = ctx.DefineLabel(); + ctx.MarkLabel(testForData); + ctx.LoadValue((int)packedWireType); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("HasSubValue")); + ctx.BranchIfFalse(noMoreData, false); + + EmitReadAndAddItem(ctx, list, tail, add, castListForAdd); + ctx.Branch(testForData, false); + + ctx.MarkLabel(noMoreData); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("EndSubItem")); + ctx.MarkLabel(allDone); + } + } + } + + private static void EmitReadAndAddItem(Compiler.CompilerContext ctx, Compiler.Local list, IProtoSerializer tail, MethodInfo add, bool castListForAdd) + { + ctx.LoadAddress(list, list.Type); // needs to be the reference in case the list is value-type (static-call) + if (castListForAdd) ctx.Cast(add.DeclaringType); + + Type itemType = tail.ExpectedType; + bool tailReturnsValue = tail.ReturnsValue; + if (tail.RequiresOldValue) + { + if (Helpers.IsValueType(itemType) || !tailReturnsValue) + { + // going to need a variable + using (Compiler.Local item = new Compiler.Local(ctx, itemType)) + { + if (Helpers.IsValueType(itemType)) + { // initialise the struct + ctx.LoadAddress(item, itemType); + ctx.EmitCtor(itemType); + } + else + { // assign null + ctx.LoadNullRef(); + ctx.StoreValue(item); + } + tail.EmitRead(ctx, item); + if (!tailReturnsValue) { ctx.LoadValue(item); } + } + } + else + { // no variable; pass the null on the stack and take the value *off* the stack + ctx.LoadNullRef(); + tail.EmitRead(ctx, null); + } + } + else + { + if (tailReturnsValue) + { // out only (on the stack); just emit it + tail.EmitRead(ctx, null); + } + else + { // doesn't take anything in nor return anything! WTF? 
+ throw new InvalidOperationException(); + } + } + // our "Add" is chosen either to take the correct type, or to take "object"; + // we may need to box the value + + Type addParamType = add.GetParameters()[0].ParameterType; + if (addParamType != itemType) + { + if (addParamType == ctx.MapType(typeof(object))) + { + ctx.CastToObject(itemType); + } + else if (Helpers.GetUnderlyingType(addParamType) == itemType) + { // list is nullable + ConstructorInfo ctor = Helpers.GetConstructor(addParamType, new Type[] { itemType }, false); + ctx.EmitCtor(ctor); // the itemType on the stack is now a Nullable + } + else + { + throw new InvalidOperationException("Conflicting item/add type"); + } + } + ctx.EmitCall(add, list.Type); + if (add.ReturnType != ctx.MapType(typeof(void))) + { + ctx.DiscardValue(); + } + } +#endif + +#if COREFX + private static readonly TypeInfo ienumeratorType = typeof(IEnumerator).GetTypeInfo(), ienumerableType = typeof (IEnumerable).GetTypeInfo(); +#else + private static readonly System.Type ienumeratorType = typeof(IEnumerator), ienumerableType = typeof(IEnumerable); +#endif + protected MethodInfo GetEnumeratorInfo(TypeModel model, out MethodInfo moveNext, out MethodInfo current) + => GetEnumeratorInfo(model, ExpectedType, Tail.ExpectedType, out moveNext, out current); + internal static MethodInfo GetEnumeratorInfo(TypeModel model, Type expectedType, Type itemType, out MethodInfo moveNext, out MethodInfo current) + { + +#if COREFX + TypeInfo enumeratorType = null, iteratorType; +#else + Type enumeratorType = null, iteratorType; +#endif + + // try a custom enumerator + MethodInfo getEnumerator = Helpers.GetInstanceMethod(expectedType, "GetEnumerator", null); + + Type getReturnType = null; + if (getEnumerator != null) + { + getReturnType = getEnumerator.ReturnType; + iteratorType = getReturnType +#if COREFX || COREFX + .GetTypeInfo() +#endif + ; + moveNext = Helpers.GetInstanceMethod(iteratorType, "MoveNext", null); + PropertyInfo prop = Helpers.GetProperty(iteratorType, "Current", false); + current = prop == null ? 
null : Helpers.GetGetMethod(prop, false, false); +#if PROFILE259 + if (moveNext == null && (model.MapType(ienumeratorType).GetTypeInfo().IsAssignableFrom(iteratorType.GetTypeInfo()))) +#else + if (moveNext == null && (model.MapType(ienumeratorType).IsAssignableFrom(iteratorType))) +#endif + { + moveNext = Helpers.GetInstanceMethod(model.MapType(ienumeratorType), "MoveNext", null); + } + // fully typed + if (moveNext != null && moveNext.ReturnType == model.MapType(typeof(bool)) + && current != null && current.ReturnType == itemType) + { + return getEnumerator; + } + moveNext = current = getEnumerator = null; + } + + // try IEnumerable + Type tmp = model.MapType(typeof(System.Collections.Generic.IEnumerable<>), false); + + if (tmp != null) + { + tmp = tmp.MakeGenericType(itemType); + +#if COREFX + enumeratorType = tmp.GetTypeInfo(); +#else + enumeratorType = tmp; +#endif + } +; +#if PROFILE259 + if (enumeratorType != null && enumeratorType.GetTypeInfo().IsAssignableFrom(expectedType +#else + if (enumeratorType != null && enumeratorType.IsAssignableFrom(expectedType +#endif +#if COREFX || PROFILE259 + .GetTypeInfo() +#endif + )) + { + getEnumerator = Helpers.GetInstanceMethod(enumeratorType, "GetEnumerator"); + getReturnType = getEnumerator.ReturnType; + +#if COREFX + iteratorType = getReturnType.GetTypeInfo(); +#else + iteratorType = getReturnType; +#endif + + moveNext = Helpers.GetInstanceMethod(model.MapType(ienumeratorType), "MoveNext"); + current = Helpers.GetGetMethod(Helpers.GetProperty(iteratorType, "Current", false), false, false); + return getEnumerator; + } + // give up and fall-back to non-generic IEnumerable + enumeratorType = model.MapType(ienumerableType); + getEnumerator = Helpers.GetInstanceMethod(enumeratorType, "GetEnumerator"); + getReturnType = getEnumerator.ReturnType; + iteratorType = getReturnType +#if COREFX + .GetTypeInfo() +#endif + ; + moveNext = Helpers.GetInstanceMethod(iteratorType, "MoveNext"); + current = Helpers.GetGetMethod(Helpers.GetProperty(iteratorType, "Current", false), false, false); + return getEnumerator; + } +#if FEAT_COMPILER + protected override void EmitWrite(ProtoBuf.Compiler.CompilerContext ctx, ProtoBuf.Compiler.Local valueFrom) + { + using (Compiler.Local list = ctx.GetLocalWithValue(ExpectedType, valueFrom)) + { + MethodInfo getEnumerator = GetEnumeratorInfo(ctx.Model, out MethodInfo moveNext, out MethodInfo current); + Helpers.DebugAssert(moveNext != null); + Helpers.DebugAssert(current != null); + Helpers.DebugAssert(getEnumerator != null); + Type enumeratorType = getEnumerator.ReturnType; + bool writePacked = WritePacked; + using (Compiler.Local iter = new Compiler.Local(ctx, enumeratorType)) + using (Compiler.Local token = writePacked ? 
new Compiler.Local(ctx, ctx.MapType(typeof(SubItemToken))) : null) + { + if (writePacked) + { + ctx.LoadValue(fieldNumber); + ctx.LoadValue((int)WireType.String); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("WriteFieldHeader")); + + ctx.LoadValue(list); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("StartSubItem")); + ctx.StoreValue(token); + + ctx.LoadValue(fieldNumber); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("SetPackedField")); + } + + ctx.LoadAddress(list, ExpectedType); + ctx.EmitCall(getEnumerator, ExpectedType); + ctx.StoreValue(iter); + using (ctx.Using(iter)) + { + Compiler.CodeLabel body = ctx.DefineLabel(), next = ctx.DefineLabel(); + ctx.Branch(next, false); + + ctx.MarkLabel(body); + + ctx.LoadAddress(iter, enumeratorType); + ctx.EmitCall(current, enumeratorType); + Type itemType = Tail.ExpectedType; + if (itemType != ctx.MapType(typeof(object)) && current.ReturnType == ctx.MapType(typeof(object))) + { + ctx.CastFromObject(itemType); + } + Tail.EmitWrite(ctx, null); + + ctx.MarkLabel(@next); + ctx.LoadAddress(iter, enumeratorType); + ctx.EmitCall(moveNext, enumeratorType); + ctx.BranchIfTrue(body, false); + } + + if (writePacked) + { + ctx.LoadValue(token); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("EndSubItem")); + } + } + } + } +#endif + + public override void Write(object value, ProtoWriter dest) + { + SubItemToken token; + bool writePacked = WritePacked; + bool fixedSizePacked = writePacked & CanUsePackedPrefix(value) && value is ICollection; + if (writePacked) + { + ProtoWriter.WriteFieldHeader(fieldNumber, WireType.String, dest); + if (fixedSizePacked) + { + ProtoWriter.WritePackedPrefix(((ICollection)value).Count, packedWireType, dest); + token = default(SubItemToken); + } + else + { + token = ProtoWriter.StartSubItem(value, dest); + } + ProtoWriter.SetPackedField(fieldNumber, dest); + } + else + { + token = new SubItemToken(); // default + } + bool checkForNull = !SupportNull; + foreach (object subItem in (IEnumerable)value) + { + if (checkForNull && subItem == null) { throw new NullReferenceException(); } + Tail.Write(subItem, dest); + } + if (writePacked) + { + if (fixedSizePacked) + { + ProtoWriter.ClearPackedField(fieldNumber, dest); + } + else + { + ProtoWriter.EndSubItem(token, dest); + } + } + } + + private bool CanUsePackedPrefix(object obj) => + ArrayDecorator.CanUsePackedPrefix(packedWireType, Tail.ExpectedType); + + public override object Read(object value, ProtoReader source) + { + try + { + int field = source.FieldNumber; + object origValue = value; + if (value == null) value = Activator.CreateInstance(concreteType); + bool isList = IsList && !SuppressIList; + if (packedWireType != WireType.None && source.WireType == WireType.String) + { + SubItemToken token = ProtoReader.StartSubItem(source); + if (isList) + { + IList list = (IList)value; + while (ProtoReader.HasSubValue(packedWireType, source)) + { + list.Add(Tail.Read(null, source)); + } + } + else + { + object[] args = new object[1]; + while (ProtoReader.HasSubValue(packedWireType, source)) + { + args[0] = Tail.Read(null, source); + add.Invoke(value, args); + } + } + ProtoReader.EndSubItem(token, source); + } + else + { + if (isList) + { + IList list = (IList)value; + do + { + list.Add(Tail.Read(null, source)); + } while (source.TryReadFieldHeader(field)); + } + else + { + object[] args = new object[1]; + do + { + args[0] = Tail.Read(null, 
source); + add.Invoke(value, args); + } while (source.TryReadFieldHeader(field)); + } + } + return origValue == value ? null : value; + } + catch (TargetInvocationException tie) + { + if (tie.InnerException != null) throw tie.InnerException; + throw; + } + } + + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/ListDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/ListDecorator.cs.meta new file mode 100644 index 0000000..a5980a2 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ListDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: eb7a73aa78c887c478b0af6d506337d5 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/MapDecorator.cs b/Runtime/Protobuf-net/Serializers/MapDecorator.cs new file mode 100644 index 0000000..033cf26 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/MapDecorator.cs @@ -0,0 +1,298 @@ +using ProtoBuf.Meta; +using System; +#if FEAT_COMPILER +using ProtoBuf.Compiler; +#endif +using System.Collections.Generic; +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + class MapDecorator<TDictionary, TKey, TValue> : ProtoDecoratorBase where TDictionary : class, IDictionary<TKey, TValue> + { + private readonly Type concreteType; + private readonly IProtoSerializer keyTail; + private readonly int fieldNumber; + private readonly WireType wireType; + + internal MapDecorator(TypeModel model, Type concreteType, IProtoSerializer keyTail, IProtoSerializer valueTail, + int fieldNumber, WireType wireType, WireType keyWireType, WireType valueWireType, bool overwriteList) + : base(DefaultValue == null + ? (IProtoSerializer)new TagDecorator(2, valueWireType, false, valueTail) + : (IProtoSerializer)new DefaultValueDecorator(model, DefaultValue, new TagDecorator(2, valueWireType, false, valueTail))) + { + this.wireType = wireType; + this.keyTail = new DefaultValueDecorator(model, DefaultKey, new TagDecorator(1, keyWireType, false, keyTail)); + this.fieldNumber = fieldNumber; + this.concreteType = concreteType ?? typeof(TDictionary); + + if (keyTail.RequiresOldValue) throw new InvalidOperationException("Key tail should not require the old value"); + if (!keyTail.ReturnsValue) throw new InvalidOperationException("Key tail should return a value"); + if (!valueTail.ReturnsValue) throw new InvalidOperationException("Value tail should return a value"); + + AppendToCollection = !overwriteList; + } + + private static readonly MethodInfo indexerSet = GetIndexerSetter(); + + private static MethodInfo GetIndexerSetter() + { +#if PROFILE259 + foreach(var prop in typeof(TDictionary).GetRuntimeProperties()) +#else + foreach (var prop in typeof(TDictionary).GetProperties(BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic)) +#endif + { + if (prop.Name != "Item") continue; + if (prop.PropertyType != typeof(TValue)) continue; + + var args = prop.GetIndexParameters(); + if (args == null || args.Length != 1) continue; + + if (args[0].ParameterType != typeof(TKey)) continue; +#if PROFILE259 + var method = prop.SetMethod; +#else + var method = prop.GetSetMethod(true); +#endif + if (method != null) + { + return method; + } + } + throw new InvalidOperationException("Unable to resolve indexer for map"); + } + 
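 + // Each dictionary entry is serialized as a length-prefixed sub-message whose field 1 is the key and field 2 is the value (see the TagDecorator wrappers built in the constructor above). + // DefaultKey/DefaultValue are the fallbacks Read uses when a key or value field is absent on the wire; string keys and values fall back to "" rather than null, in line with proto3 map defaults.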
 + private static readonly TKey DefaultKey = (typeof(TKey) == typeof(string)) ? (TKey)(object)"" : default(TKey); + private static readonly TValue DefaultValue = (typeof(TValue) == typeof(string)) ? (TValue)(object)"" : default(TValue); + public override Type ExpectedType => typeof(TDictionary); + + public override bool ReturnsValue => true; + + public override bool RequiresOldValue => AppendToCollection; + + private bool AppendToCollection { get; } + + public override object Read(object untyped, ProtoReader source) + { + TDictionary typed = AppendToCollection ? ((TDictionary)untyped) : null; + if (typed == null) typed = (TDictionary)Activator.CreateInstance(concreteType); + + do + { + var key = DefaultKey; + var value = DefaultValue; + SubItemToken token = ProtoReader.StartSubItem(source); + int field; + while ((field = source.ReadFieldHeader()) > 0) + { + switch (field) + { + case 1: + key = (TKey)keyTail.Read(null, source); + break; + case 2: + value = (TValue)Tail.Read(Tail.RequiresOldValue ? (object)value : null, source); + break; + default: + source.SkipField(); + break; + } + } + + ProtoReader.EndSubItem(token, source); + typed[key] = value; + } while (source.TryReadFieldHeader(fieldNumber)); + + return typed; + } + + public override void Write(object untyped, ProtoWriter dest) + { + foreach (var pair in (TDictionary)untyped) + { + ProtoWriter.WriteFieldHeader(fieldNumber, wireType, dest); + var token = ProtoWriter.StartSubItem(null, dest); + if (pair.Key != null) keyTail.Write(pair.Key, dest); + if (pair.Value != null) Tail.Write(pair.Value, dest); + ProtoWriter.EndSubItem(token, dest); + } + } + +#if FEAT_COMPILER + protected override void EmitWrite(CompilerContext ctx, Local valueFrom) + { + Type itemType = typeof(KeyValuePair<TKey, TValue>); + MethodInfo moveNext, current, getEnumerator = ListDecorator.GetEnumeratorInfo(ctx.Model, + ExpectedType, itemType, out moveNext, out current); + Type enumeratorType = getEnumerator.ReturnType; + + MethodInfo key = itemType.GetProperty(nameof(KeyValuePair<TKey, TValue>.Key)).GetGetMethod(), + @value = itemType.GetProperty(nameof(KeyValuePair<TKey, TValue>.Value)).GetGetMethod(); + + using (Compiler.Local list = ctx.GetLocalWithValue(ExpectedType, valueFrom)) + using (Compiler.Local iter = new Compiler.Local(ctx, enumeratorType)) + using (Compiler.Local token = new Compiler.Local(ctx, typeof(SubItemToken))) + using (Compiler.Local kvp = new Compiler.Local(ctx, itemType)) + { + ctx.LoadAddress(list, ExpectedType); + ctx.EmitCall(getEnumerator, ExpectedType); + ctx.StoreValue(iter); + using (ctx.Using(iter)) + { + Compiler.CodeLabel body = ctx.DefineLabel(), next = ctx.DefineLabel(); + ctx.Branch(next, false); + + ctx.MarkLabel(body); + + ctx.LoadAddress(iter, enumeratorType); + ctx.EmitCall(current, enumeratorType); + + if (itemType != ctx.MapType(typeof(object)) && current.ReturnType == ctx.MapType(typeof(object))) + { + ctx.CastFromObject(itemType); + } + ctx.StoreValue(kvp); + + ctx.LoadValue(fieldNumber); + ctx.LoadValue((int)wireType); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("WriteFieldHeader")); + + ctx.LoadNullRef(); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("StartSubItem")); + ctx.StoreValue(token); + + ctx.LoadAddress(kvp, itemType); + ctx.EmitCall(key, itemType); + ctx.WriteNullCheckedTail(typeof(TKey), keyTail, null); + + ctx.LoadAddress(kvp, itemType); + ctx.EmitCall(value, itemType); + ctx.WriteNullCheckedTail(typeof(TValue), Tail, null); + + ctx.LoadValue(token); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("EndSubItem")); + 
+ ctx.MarkLabel(@next); + ctx.LoadAddress(iter, enumeratorType); + ctx.EmitCall(moveNext, enumeratorType); + ctx.BranchIfTrue(body, false); + } + } + } + protected override void EmitRead(CompilerContext ctx, Local valueFrom) + { + using (Compiler.Local list = AppendToCollection ? ctx.GetLocalWithValue(ExpectedType, valueFrom) + : new Compiler.Local(ctx, typeof(TDictionary))) + using (Compiler.Local token = new Compiler.Local(ctx, typeof(SubItemToken))) + using (Compiler.Local key = new Compiler.Local(ctx, typeof(TKey))) + using (Compiler.Local @value = new Compiler.Local(ctx, typeof(TValue))) + using (Compiler.Local fieldNumber = new Compiler.Local(ctx, ctx.MapType(typeof(int)))) + { + if (!AppendToCollection) + { // always new + ctx.LoadNullRef(); + ctx.StoreValue(list); + } + if (concreteType != null) + { + ctx.LoadValue(list); + Compiler.CodeLabel notNull = ctx.DefineLabel(); + ctx.BranchIfTrue(notNull, true); + ctx.EmitCtor(concreteType); + ctx.StoreValue(list); + ctx.MarkLabel(notNull); + } + + var redoFromStart = ctx.DefineLabel(); + ctx.MarkLabel(redoFromStart); + + // key = default(TKey); value = default(TValue); + if (typeof(TKey) == typeof(string)) + { + ctx.LoadValue(""); + ctx.StoreValue(key); + } + else + { + ctx.InitLocal(typeof(TKey), key); + } + if (typeof(TValue) == typeof(string)) + { + ctx.LoadValue(""); + ctx.StoreValue(value); + } + else + { + ctx.InitLocal(typeof(TValue), @value); + } + + // token = ProtoReader.StartSubItem(reader); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("StartSubItem")); + ctx.StoreValue(token); + + Compiler.CodeLabel @continue = ctx.DefineLabel(), processField = ctx.DefineLabel(); + // while ... + ctx.Branch(@continue, false); + + // switch(fieldNumber) + ctx.MarkLabel(processField); + ctx.LoadValue(fieldNumber); + CodeLabel @default = ctx.DefineLabel(), one = ctx.DefineLabel(), two = ctx.DefineLabel(); + ctx.Switch(new[] { @default, one, two }); // zero based, hence explicit 0 + + // case 0: default: reader.SkipField(); + ctx.MarkLabel(@default); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("SkipField")); + ctx.Branch(@continue, false); + + // case 1: key = ... + ctx.MarkLabel(one); + keyTail.EmitRead(ctx, null); + ctx.StoreValue(key); + ctx.Branch(@continue, false); + + // case 2: value = ... + ctx.MarkLabel(two); + Tail.EmitRead(ctx, Tail.RequiresOldValue ? 
@value : null); + ctx.StoreValue(value); + + // (fieldNumber = reader.ReadFieldHeader()) > 0 + ctx.MarkLabel(@continue); + ctx.EmitBasicRead("ReadFieldHeader", ctx.MapType(typeof(int))); + ctx.CopyValue(); + ctx.StoreValue(fieldNumber); + ctx.LoadValue(0); + ctx.BranchIfGreater(processField, false); + + // ProtoReader.EndSubItem(token, reader); + ctx.LoadValue(token); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("EndSubItem")); + + // list[key] = value; + ctx.LoadAddress(list, ExpectedType); + ctx.LoadValue(key); + ctx.LoadValue(@value); + ctx.EmitCall(indexerSet); + + // while reader.TryReadFieldReader(fieldNumber) + ctx.LoadReaderWriter(); + ctx.LoadValue(this.fieldNumber); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("TryReadFieldHeader")); + ctx.BranchIfTrue(redoFromStart, false); + + if (ReturnsValue) + { + ctx.LoadValue(list); + } + } + } +#endif + } +} diff --git a/Runtime/Protobuf-net/Serializers/MapDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/MapDecorator.cs.meta new file mode 100644 index 0000000..51c4525 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/MapDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 522b5c8a0fa5be14591bc9cbe3b194d3 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/MemberSpecifiedDecorator.cs b/Runtime/Protobuf-net/Serializers/MemberSpecifiedDecorator.cs new file mode 100644 index 0000000..3ee80a5 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/MemberSpecifiedDecorator.cs @@ -0,0 +1,76 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + sealed class MemberSpecifiedDecorator : ProtoDecoratorBase + { + public override Type ExpectedType => Tail.ExpectedType; + + public override bool RequiresOldValue => Tail.RequiresOldValue; + + public override bool ReturnsValue => Tail.ReturnsValue; + + private readonly MethodInfo getSpecified, setSpecified; + public MemberSpecifiedDecorator(MethodInfo getSpecified, MethodInfo setSpecified, IProtoSerializer tail) + : base(tail) + { + if (getSpecified == null && setSpecified == null) throw new InvalidOperationException(); + this.getSpecified = getSpecified; + this.setSpecified = setSpecified; + } + + public override void Write(object value, ProtoWriter dest) + { + if (getSpecified == null || (bool)getSpecified.Invoke(value, null)) + { + Tail.Write(value, dest); + } + } + + public override object Read(object value, ProtoReader source) + { + object result = Tail.Read(value, source); + if (setSpecified != null) setSpecified.Invoke(value, new object[] { true }); + return result; + } + +#if FEAT_COMPILER + protected override void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + if (getSpecified == null) + { + Tail.EmitWrite(ctx, valueFrom); + return; + } + using (Compiler.Local loc = ctx.GetLocalWithValue(ExpectedType, valueFrom)) + { + ctx.LoadAddress(loc, ExpectedType); + ctx.EmitCall(getSpecified); + Compiler.CodeLabel done = ctx.DefineLabel(); + ctx.BranchIfFalse(done, false); + Tail.EmitWrite(ctx, loc); + ctx.MarkLabel(done); + } + + } + protected override void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + if (setSpecified == null) + { + Tail.EmitRead(ctx, valueFrom); + return; + } + using (Compiler.Local loc = ctx.GetLocalWithValue(ExpectedType, valueFrom)) + { + 
Tail.EmitRead(ctx, loc); + ctx.LoadAddress(loc, ExpectedType); + ctx.LoadValue(1); // true + ctx.EmitCall(setSpecified); + } + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/MemberSpecifiedDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/MemberSpecifiedDecorator.cs.meta new file mode 100644 index 0000000..f2d6187 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/MemberSpecifiedDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 58836f822e85e2447817d187f3bcd5de +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/NetObjectSerializer.cs b/Runtime/Protobuf-net/Serializers/NetObjectSerializer.cs new file mode 100644 index 0000000..c3d685b --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/NetObjectSerializer.cs @@ -0,0 +1,64 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class NetObjectSerializer : IProtoSerializer + { + private readonly int key; + private readonly Type type; + + private readonly BclHelpers.NetObjectOptions options; + + public NetObjectSerializer(TypeModel model, Type type, int key, BclHelpers.NetObjectOptions options) + { + bool dynamicType = (options & BclHelpers.NetObjectOptions.DynamicType) != 0; + this.key = dynamicType ? -1 : key; + this.type = dynamicType ? model.MapType(typeof(object)) : type; + this.options = options; + } + + public Type ExpectedType => type; + + public bool ReturnsValue => true; + + public bool RequiresOldValue => true; + + public object Read(object value, ProtoReader source) + { + return BclHelpers.ReadNetObject(value, source, key, type == typeof(object) ? 
null : type, options); + } + + public void Write(object value, ProtoWriter dest) + { + BclHelpers.WriteNetObject(value, dest, key, options); + } + +#if FEAT_COMPILER + public void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.LoadValue(valueFrom); + ctx.CastToObject(type); + ctx.LoadReaderWriter(); + ctx.LoadValue(ctx.MapMetaKeyToCompiledKey(key)); + if (type == ctx.MapType(typeof(object))) ctx.LoadNullRef(); + else ctx.LoadValue(type); + ctx.LoadValue((int)options); + ctx.EmitCall(ctx.MapType(typeof(BclHelpers)).GetMethod("ReadNetObject")); + ctx.CastFromObject(type); + } + public void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.LoadValue(valueFrom); + ctx.CastToObject(type); + ctx.LoadReaderWriter(); + ctx.LoadValue(ctx.MapMetaKeyToCompiledKey(key)); + ctx.LoadValue((int)options); + ctx.EmitCall(ctx.MapType(typeof(BclHelpers)).GetMethod("WriteNetObject")); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/NetObjectSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/NetObjectSerializer.cs.meta new file mode 100644 index 0000000..f53dfad --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/NetObjectSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: dd6edd815f76150449f39f7571679912 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/NullDecorator.cs b/Runtime/Protobuf-net/Serializers/NullDecorator.cs new file mode 100644 index 0000000..52db14c --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/NullDecorator.cs @@ -0,0 +1,167 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; + +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class NullDecorator : ProtoDecoratorBase + { + private readonly Type expectedType; + public const int Tag = 1; + public NullDecorator(TypeModel model, IProtoSerializer tail) : base(tail) + { + if (!tail.ReturnsValue) + throw new NotSupportedException("NullDecorator only supports implementations that return values"); + + Type tailType = tail.ExpectedType; + if (Helpers.IsValueType(tailType)) + { + expectedType = model.MapType(typeof(Nullable<>)).MakeGenericType(tailType); + } + else + { + expectedType = tailType; + } + } + + public override Type ExpectedType => expectedType; + + public override bool ReturnsValue => true; + + public override bool RequiresOldValue => true; + +#if FEAT_COMPILER + protected override void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + using (Compiler.Local oldValue = ctx.GetLocalWithValue(expectedType, valueFrom)) + using (Compiler.Local token = new Compiler.Local(ctx, ctx.MapType(typeof(SubItemToken)))) + using (Compiler.Local field = new Compiler.Local(ctx, ctx.MapType(typeof(int)))) + { + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("StartSubItem")); + ctx.StoreValue(token); + + Compiler.CodeLabel next = ctx.DefineLabel(), processField = ctx.DefineLabel(), end = ctx.DefineLabel(); + + ctx.MarkLabel(next); + + ctx.EmitBasicRead("ReadFieldHeader", ctx.MapType(typeof(int))); + ctx.CopyValue(); + ctx.StoreValue(field); + ctx.LoadValue(Tag); // = 1 - process + ctx.BranchIfEqual(processField, true); + ctx.LoadValue(field); + ctx.LoadValue(1); // < 1 - exit + ctx.BranchIfLess(end, false); + + // default: skip + ctx.LoadReaderWriter(); + 
ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("SkipField")); + ctx.Branch(next, true); + + // process + ctx.MarkLabel(processField); + if (Tail.RequiresOldValue) + { + if (Helpers.IsValueType(expectedType)) + { + ctx.LoadAddress(oldValue, expectedType); + ctx.EmitCall(expectedType.GetMethod("GetValueOrDefault", Helpers.EmptyTypes)); + } + else + { + ctx.LoadValue(oldValue); + } + } + Tail.EmitRead(ctx, null); + // note we demanded always returns a value + if (Helpers.IsValueType(expectedType)) + { + ctx.EmitCtor(expectedType, Tail.ExpectedType); // re-nullable it + } + ctx.StoreValue(oldValue); + ctx.Branch(next, false); + + // outro + ctx.MarkLabel(end); + + ctx.LoadValue(token); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("EndSubItem")); + ctx.LoadValue(oldValue); // load the old value + } + } + protected override void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + using (Compiler.Local valOrNull = ctx.GetLocalWithValue(expectedType, valueFrom)) + using (Compiler.Local token = new Compiler.Local(ctx, ctx.MapType(typeof(SubItemToken)))) + { + ctx.LoadNullRef(); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("StartSubItem")); + ctx.StoreValue(token); + + if (Helpers.IsValueType(expectedType)) + { + ctx.LoadAddress(valOrNull, expectedType); + ctx.LoadValue(expectedType.GetProperty("HasValue")); + } + else + { + ctx.LoadValue(valOrNull); + } + Compiler.CodeLabel @end = ctx.DefineLabel(); + ctx.BranchIfFalse(@end, false); + if (Helpers.IsValueType(expectedType)) + { + ctx.LoadAddress(valOrNull, expectedType); + ctx.EmitCall(expectedType.GetMethod("GetValueOrDefault", Helpers.EmptyTypes)); + } + else + { + ctx.LoadValue(valOrNull); + } + Tail.EmitWrite(ctx, null); + + ctx.MarkLabel(@end); + + ctx.LoadValue(token); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("EndSubItem")); + } + } +#endif + + public override object Read(object value, ProtoReader source) + { + SubItemToken tok = ProtoReader.StartSubItem(source); + int field; + while ((field = source.ReadFieldHeader()) > 0) + { + if (field == Tag) + { + value = Tail.Read(value, source); + } + else + { + source.SkipField(); + } + } + ProtoReader.EndSubItem(tok, source); + return value; + } + + public override void Write(object value, ProtoWriter dest) + { + SubItemToken token = ProtoWriter.StartSubItem(null, dest); + if (value != null) + { + Tail.Write(value, dest); + } + ProtoWriter.EndSubItem(token, dest); + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/NullDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/NullDecorator.cs.meta new file mode 100644 index 0000000..4fb2a35 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/NullDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 4728d79a9a96bde4097e4c599266a6e4 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/ParseableSerializer.cs b/Runtime/Protobuf-net/Serializers/ParseableSerializer.cs new file mode 100644 index 0000000..9a4bb07 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ParseableSerializer.cs @@ -0,0 +1,111 @@ +#if !NO_RUNTIME +using System; +using System.Net; +using ProtoBuf.Meta; +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + sealed class ParseableSerializer : IProtoSerializer + 
{ + private readonly MethodInfo parse; + public static ParseableSerializer TryCreate(Type type, TypeModel model) + { + if (type == null) throw new ArgumentNullException("type"); +#if PORTABLE || COREFX || PROFILE259 + MethodInfo method = null; + +#if COREFX || PROFILE259 + foreach (MethodInfo tmp in type.GetTypeInfo().GetDeclaredMethods("Parse")) +#else + foreach (MethodInfo tmp in type.GetMethods(BindingFlags.Public | BindingFlags.Static | BindingFlags.DeclaredOnly)) +#endif + { + ParameterInfo[] p; + if (tmp.Name == "Parse" && tmp.IsPublic && tmp.IsStatic && tmp.DeclaringType == type && (p = tmp.GetParameters()) != null && p.Length == 1 && p[0].ParameterType == typeof(string)) + { + method = tmp; + break; + } + } +#else + MethodInfo method = type.GetMethod("Parse", + BindingFlags.Public | BindingFlags.Static | BindingFlags.DeclaredOnly, + null, new Type[] { model.MapType(typeof(string)) }, null); +#endif + if (method != null && method.ReturnType == type) + { + if (Helpers.IsValueType(type)) + { + MethodInfo toString = GetCustomToString(type); + if (toString == null || toString.ReturnType != model.MapType(typeof(string))) return null; // need custom ToString, fools + } + return new ParseableSerializer(method); + } + return null; + } + private static MethodInfo GetCustomToString(Type type) + { +#if PORTABLE || COREFX || PROFILE259 + MethodInfo method = Helpers.GetInstanceMethod(type, "ToString", Helpers.EmptyTypes); + if (method == null || !method.IsPublic || method.IsStatic || method.DeclaringType != type) return null; + return method; +#else + + return type.GetMethod("ToString", BindingFlags.Public | BindingFlags.Instance | BindingFlags.DeclaredOnly, + null, Helpers.EmptyTypes, null); +#endif + } + + private ParseableSerializer(MethodInfo parse) + { + this.parse = parse; + } + + public Type ExpectedType => parse.DeclaringType; + + bool IProtoSerializer.RequiresOldValue { get { return false; } } + bool IProtoSerializer.ReturnsValue { get { return true; } } + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return parse.Invoke(null, new object[] { source.ReadString() }); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteString(value.ToString(), dest); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + Type type = ExpectedType; + if (Helpers.IsValueType(type)) + { // note that for structs, we've already asserted that a custom ToString + // exists; no need to handle the box/callvirt scenario + + // force it to a variable if needed, so we can take the address + using (Compiler.Local loc = ctx.GetLocalWithValue(type, valueFrom)) + { + ctx.LoadAddress(loc, type); + ctx.EmitCall(GetCustomToString(type)); + } + } + else + { + ctx.EmitCall(ctx.MapType(typeof(object)).GetMethod("ToString")); + } + ctx.EmitBasicWrite("WriteString", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadString", ctx.MapType(typeof(string))); + ctx.EmitCall(parse); + } +#endif + + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/ParseableSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/ParseableSerializer.cs.meta new file mode 100644 index 0000000..9ee5ec1 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ParseableSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 562e4dd519901854fba22d511f594b82 +MonoImporter: + 
externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/PropertyDecorator.cs b/Runtime/Protobuf-net/Serializers/PropertyDecorator.cs new file mode 100644 index 0000000..8b0a014 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/PropertyDecorator.cs @@ -0,0 +1,167 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; + +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class PropertyDecorator : ProtoDecoratorBase + { + public override Type ExpectedType => forType; + private readonly PropertyInfo property; + private readonly Type forType; + public override bool RequiresOldValue => true; + public override bool ReturnsValue => false; + private readonly bool readOptionsWriteValue; + private readonly MethodInfo shadowSetter; + + public PropertyDecorator(TypeModel model, Type forType, PropertyInfo property, IProtoSerializer tail) : base(tail) + { + Helpers.DebugAssert(forType != null); + Helpers.DebugAssert(property != null); + this.forType = forType; + this.property = property; + SanityCheck(model, property, tail, out readOptionsWriteValue, true, true); + shadowSetter = GetShadowSetter(model, property); + } + + private static void SanityCheck(TypeModel model, PropertyInfo property, IProtoSerializer tail, out bool writeValue, bool nonPublic, bool allowInternal) + { + if (property == null) throw new ArgumentNullException("property"); + + writeValue = tail.ReturnsValue && (GetShadowSetter(model, property) != null || (property.CanWrite && Helpers.GetSetMethod(property, nonPublic, allowInternal) != null)); + if (!property.CanRead || Helpers.GetGetMethod(property, nonPublic, allowInternal) == null) + { + throw new InvalidOperationException("Cannot serialize property without a get accessor"); + } + if (!writeValue && (!tail.RequiresOldValue || Helpers.IsValueType(tail.ExpectedType))) + { // so we can't save the value, and the tail doesn't use it either... not helpful + // or: can't write the value, so the struct value will be lost + throw new InvalidOperationException("Cannot apply changes to property " + property.DeclaringType.FullName + "." + property.Name); + } + } + static MethodInfo GetShadowSetter(TypeModel model, PropertyInfo property) + { +#if COREFX + MethodInfo method = Helpers.GetInstanceMethod(property.DeclaringType.GetTypeInfo(), "Set" + property.Name, new Type[] { property.PropertyType }); +#else + +#if PROFILE259 + Type reflectedType = property.DeclaringType; +#else + Type reflectedType = property.ReflectedType; +#endif + MethodInfo method = Helpers.GetInstanceMethod(reflectedType, "Set" + property.Name, new Type[] { property.PropertyType }); +#endif + if (method == null || !method.IsPublic || method.ReturnType != model.MapType(typeof(void))) return null; + return method; + } + + public override void Write(object value, ProtoWriter dest) + { + Helpers.DebugAssert(value != null); + value = property.GetValue(value, null); + if (value != null) Tail.Write(value, dest); + } + + public override object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value != null); + + object oldVal = Tail.RequiresOldValue ? 
property.GetValue(value, null) : null; + object newVal = Tail.Read(oldVal, source); + if (readOptionsWriteValue && newVal != null) // if the tail returns a null, intepret that as *no assign* + { + if (shadowSetter == null) + { + property.SetValue(value, newVal, null); + } + else + { + shadowSetter.Invoke(value, new object[] { newVal }); + } + } + return null; + } + +#if FEAT_COMPILER + protected override void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.LoadAddress(valueFrom, ExpectedType); + ctx.LoadValue(property); + ctx.WriteNullCheckedTail(property.PropertyType, Tail, null); + } + + protected override void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + SanityCheck(ctx.Model, property, Tail, out bool writeValue, ctx.NonPublic, ctx.AllowInternal(property)); + if (Helpers.IsValueType(ExpectedType) && valueFrom == null) + { + throw new InvalidOperationException("Attempt to mutate struct on the head of the stack; changes would be lost"); + } + + using (Compiler.Local loc = ctx.GetLocalWithValue(ExpectedType, valueFrom)) + { + if (Tail.RequiresOldValue) + { + ctx.LoadAddress(loc, ExpectedType); // stack is: old-addr + ctx.LoadValue(property); // stack is: old-value + } + Type propertyType = property.PropertyType; + ctx.ReadNullCheckedTail(propertyType, Tail, null); // stack is [new-value] + + if (writeValue) + { + using (Compiler.Local newVal = new Compiler.Local(ctx, property.PropertyType)) + { + ctx.StoreValue(newVal); // stack is empty + + Compiler.CodeLabel allDone = new Compiler.CodeLabel(); // <=== default structs + if (!Helpers.IsValueType(propertyType)) + { // if the tail returns a null, intepret that as *no assign* + allDone = ctx.DefineLabel(); + ctx.LoadValue(newVal); // stack is: new-value + ctx.BranchIfFalse(@allDone, true); // stack is empty + } + // assign the value + ctx.LoadAddress(loc, ExpectedType); // parent-addr + ctx.LoadValue(newVal); // parent-obj|new-value + if (shadowSetter == null) + { + ctx.StoreValue(property); // empty + } + else + { + ctx.EmitCall(shadowSetter); // empty + } + if (!Helpers.IsValueType(propertyType)) + { + ctx.MarkLabel(allDone); + } + } + + } + else + { // don't want return value; drop it if anything there + // stack is [new-value] + if (Tail.ReturnsValue) { ctx.DiscardValue(); } + } + } + } +#endif + + internal static bool CanWrite(TypeModel model, MemberInfo member) + { + if (member == null) throw new ArgumentNullException(nameof(member)); + + if (member is PropertyInfo prop) + { + return prop.CanWrite || GetShadowSetter(model, prop) != null; + } + + return member is FieldInfo; // fields are always writeable; anything else: JUST SAY NO! 
+ } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/PropertyDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/PropertyDecorator.cs.meta new file mode 100644 index 0000000..5aca94d --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/PropertyDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 2b21fd2b2435fed4da28bd193e2ce80d +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/ProtoDecoratorBase.cs b/Runtime/Protobuf-net/Serializers/ProtoDecoratorBase.cs new file mode 100644 index 0000000..e7f2b34 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ProtoDecoratorBase.cs @@ -0,0 +1,24 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + abstract class ProtoDecoratorBase : IProtoSerializer + { + public abstract Type ExpectedType { get; } + protected readonly IProtoSerializer Tail; + protected ProtoDecoratorBase(IProtoSerializer tail) { this.Tail = tail; } + public abstract bool ReturnsValue { get; } + public abstract bool RequiresOldValue { get; } + public abstract void Write(object value, ProtoWriter dest); + public abstract object Read(object value, ProtoReader source); + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) { EmitWrite(ctx, valueFrom); } + protected abstract void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom); + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) { EmitRead(ctx, valueFrom); } + protected abstract void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom); +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/ProtoDecoratorBase.cs.meta b/Runtime/Protobuf-net/Serializers/ProtoDecoratorBase.cs.meta new file mode 100644 index 0000000..92acdfc --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ProtoDecoratorBase.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 1e9afbb9465ade140ab0fcd217ea0b66 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/ReflectedUriDecorator.cs b/Runtime/Protobuf-net/Serializers/ReflectedUriDecorator.cs new file mode 100644 index 0000000..44edef0 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ReflectedUriDecorator.cs @@ -0,0 +1,90 @@ +#if !NO_RUNTIME +#if PORTABLE +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + /// + /// Manipulates with uris via reflection rather than strongly typed objects. 
+ /// This is because in PCLs, the Uri type may not match (WinRT uses Internal/Uri, .Net uses System/Uri) + /// + sealed class ReflectedUriDecorator : ProtoDecoratorBase + { + private readonly Type expectedType; + + private readonly PropertyInfo absoluteUriProperty; + + private readonly ConstructorInfo typeConstructor; + + public ReflectedUriDecorator(Type type, ProtoBuf.Meta.TypeModel model, IProtoSerializer tail) : base(tail) + { + expectedType = type; + +#if PROFILE259 + absoluteUriProperty = expectedType.GetRuntimeProperty("AbsoluteUri"); + IEnumerable constructors = expectedType.GetTypeInfo().DeclaredConstructors; + typeConstructor = null; + foreach(ConstructorInfo constructor in constructors) + { + ParameterInfo[] parameters = constructor.GetParameters(); + ParameterInfo parameterFirst = parameters.FirstOrDefault(); + Type stringType = typeof(string); + if (parameterFirst != null && + parameterFirst.ParameterType == stringType) + { + typeConstructor = constructor; + break; + } + } +#else + absoluteUriProperty = expectedType.GetProperty("AbsoluteUri"); + typeConstructor = expectedType.GetConstructor(new Type[] { typeof(string) }); +#endif + } + public override Type ExpectedType { get { return expectedType; } } + public override bool RequiresOldValue { get { return false; } } + public override bool ReturnsValue { get { return true; } } + + public override void Write(object value, ProtoWriter dest) + { + Tail.Write(absoluteUriProperty.GetValue(value, null), dest); + } + public override object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // not expecting incoming + string s = (string)Tail.Read(null, source); + + return s.Length == 0 ? null : typeConstructor.Invoke(new object[] { s }); + } + +#if FEAT_COMPILER + protected override void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.LoadValue(valueFrom); + ctx.LoadValue(absoluteUriProperty); + Tail.EmitWrite(ctx, null); + } + protected override void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + Tail.EmitRead(ctx, valueFrom); + ctx.CopyValue(); + Compiler.CodeLabel @nonEmpty = ctx.DefineLabel(), @end = ctx.DefineLabel(); + ctx.LoadValue(typeof(string).GetProperty("Length")); + ctx.BranchIfTrue(@nonEmpty, true); + ctx.DiscardValue(); + ctx.LoadNullRef(); + ctx.Branch(@end, true); + ctx.MarkLabel(@nonEmpty); + ctx.EmitCtor(expectedType, ctx.MapType(typeof(string))); + ctx.MarkLabel(@end); + + } +#endif + } +} +#endif +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/ReflectedUriDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/ReflectedUriDecorator.cs.meta new file mode 100644 index 0000000..2441cdf --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/ReflectedUriDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 6cf63470b1970d84ead637b9ee2bface +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/SByteSerializer.cs b/Runtime/Protobuf-net/Serializers/SByteSerializer.cs new file mode 100644 index 0000000..81d233e --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SByteSerializer.cs @@ -0,0 +1,45 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class SByteSerializer : IProtoSerializer + { + static readonly Type expectedType = typeof(sbyte); + + public SByteSerializer(ProtoBuf.Meta.TypeModel model) + { + + 
} + + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadSByte(); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteSByte((sbyte)value, dest); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteSByte", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadSByte", ExpectedType); + } +#endif + + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/SByteSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/SByteSerializer.cs.meta new file mode 100644 index 0000000..7d71fef --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SByteSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 09de0f3e56d4834428cb8ba75929d216 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/SingleSerializer.cs b/Runtime/Protobuf-net/Serializers/SingleSerializer.cs new file mode 100644 index 0000000..c5ade13 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SingleSerializer.cs @@ -0,0 +1,45 @@ +#if !NO_RUNTIME +using System; +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class SingleSerializer : IProtoSerializer + { + static readonly Type expectedType = typeof(float); + + public Type ExpectedType { get { return expectedType; } } + + public SingleSerializer(TypeModel model) + { + } + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadSingle(); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteSingle((float)value, dest); + } + + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteSingle", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadSingle", ExpectedType); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/SingleSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/SingleSerializer.cs.meta new file mode 100644 index 0000000..ee64e5a --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SingleSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 9b89c111cae2d81469987d208a41af4b +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/StringSerializer.cs b/Runtime/Protobuf-net/Serializers/StringSerializer.cs new file mode 100644 index 0000000..399b4bb --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/StringSerializer.cs @@ -0,0 +1,41 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class StringSerializer : IProtoSerializer + { + static readonly Type expectedType = typeof(string); + + public 
StringSerializer(ProtoBuf.Meta.TypeModel model) + { + } + + public Type ExpectedType => expectedType; + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteString((string)value, dest); + } + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadString(); + } +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteString", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadString", ExpectedType); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/StringSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/StringSerializer.cs.meta new file mode 100644 index 0000000..697d8c2 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/StringSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 80c8f3efc5f697845b352b56780105ae +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/SubItemSerializer.cs b/Runtime/Protobuf-net/Serializers/SubItemSerializer.cs new file mode 100644 index 0000000..58015aa --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SubItemSerializer.cs @@ -0,0 +1,138 @@ +#if !NO_RUNTIME +using System; +using ProtoBuf.Meta; + +#if FEAT_COMPILER +using System.Reflection.Emit; +#endif + +namespace ProtoBuf.Serializers +{ + sealed class SubItemSerializer : IProtoTypeSerializer + { + bool IProtoTypeSerializer.HasCallbacks(TypeModel.CallbackType callbackType) + { + return ((IProtoTypeSerializer)proxy.Serializer).HasCallbacks(callbackType); + } + + bool IProtoTypeSerializer.CanCreateInstance() + { + return ((IProtoTypeSerializer)proxy.Serializer).CanCreateInstance(); + } + +#if FEAT_COMPILER + void IProtoTypeSerializer.EmitCallback(Compiler.CompilerContext ctx, Compiler.Local valueFrom, TypeModel.CallbackType callbackType) + { + ((IProtoTypeSerializer)proxy.Serializer).EmitCallback(ctx, valueFrom, callbackType); + } + + void IProtoTypeSerializer.EmitCreateInstance(Compiler.CompilerContext ctx) + { + ((IProtoTypeSerializer)proxy.Serializer).EmitCreateInstance(ctx); + } +#endif + + void IProtoTypeSerializer.Callback(object value, TypeModel.CallbackType callbackType, SerializationContext context) + { + ((IProtoTypeSerializer)proxy.Serializer).Callback(value, callbackType, context); + } + + object IProtoTypeSerializer.CreateInstance(ProtoReader source) + { + return ((IProtoTypeSerializer)proxy.Serializer).CreateInstance(source); + } + + private readonly int key; + private readonly Type type; + private readonly ISerializerProxy proxy; + private readonly bool recursionCheck; + public SubItemSerializer(Type type, int key, ISerializerProxy proxy, bool recursionCheck) + { + this.type = type ?? throw new ArgumentNullException(nameof(type)); + this.proxy = proxy ?? 
throw new ArgumentNullException(nameof(proxy)); + this.key = key; + this.recursionCheck = recursionCheck; + } + + Type IProtoSerializer.ExpectedType => type; + + bool IProtoSerializer.RequiresOldValue => true; + + bool IProtoSerializer.ReturnsValue => true; + + void IProtoSerializer.Write(object value, ProtoWriter dest) + { + if (recursionCheck) + { + ProtoWriter.WriteObject(value, key, dest); + } + else + { + ProtoWriter.WriteRecursionSafeObject(value, key, dest); + } + } + + object IProtoSerializer.Read(object value, ProtoReader source) + { + return ProtoReader.ReadObject(value, key, source); + } + +#if FEAT_COMPILER + bool EmitDedicatedMethod(Compiler.CompilerContext ctx, Compiler.Local valueFrom, bool read) + { + MethodBuilder method = ctx.GetDedicatedMethod(key, read); + if (method == null) return false; + + using (Compiler.Local token = new ProtoBuf.Compiler.Local(ctx, ctx.MapType(typeof(SubItemToken)))) + { + Type rwType = ctx.MapType(read ? typeof(ProtoReader) : typeof(ProtoWriter)); + ctx.LoadValue(valueFrom); + if (!read) // write requires the object for StartSubItem; read doesn't + { // (if recursion-check is disabled [subtypes] then null is fine too) + if (Helpers.IsValueType(type) || !recursionCheck) { ctx.LoadNullRef(); } + else { ctx.CopyValue(); } + } + ctx.LoadReaderWriter(); + ctx.EmitCall(Helpers.GetStaticMethod(rwType, "StartSubItem", + read ? new Type[] { rwType } : new Type[] { ctx.MapType(typeof(object)), rwType })); + ctx.StoreValue(token); + + // note: value already on the stack + ctx.LoadReaderWriter(); + ctx.EmitCall(method); + // handle inheritance (we will be calling the *base* version of things, + // but we expect Read to return the "type" type) + if (read && type != method.ReturnType) ctx.Cast(this.type); + ctx.LoadValue(token); + ctx.LoadReaderWriter(); + ctx.EmitCall(Helpers.GetStaticMethod(rwType, "EndSubItem", new Type[] { ctx.MapType(typeof(SubItemToken)), rwType })); + } + return true; + } + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + if (!EmitDedicatedMethod(ctx, valueFrom, false)) + { + ctx.LoadValue(valueFrom); + if (Helpers.IsValueType(type)) ctx.CastToObject(type); + ctx.LoadValue(ctx.MapMetaKeyToCompiledKey(key)); // re-map for formality, but would expect identical, else dedicated method + ctx.LoadReaderWriter(); + ctx.EmitCall(Helpers.GetStaticMethod(ctx.MapType(typeof(ProtoWriter)), recursionCheck ? 
"WriteObject" : "WriteRecursionSafeObject", new Type[] { ctx.MapType(typeof(object)), ctx.MapType(typeof(int)), ctx.MapType(typeof(ProtoWriter)) })); + } + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + if (!EmitDedicatedMethod(ctx, valueFrom, true)) + { + ctx.LoadValue(valueFrom); + if (Helpers.IsValueType(type)) ctx.CastToObject(type); + ctx.LoadValue(ctx.MapMetaKeyToCompiledKey(key)); // re-map for formality, but would expect identical, else dedicated method + ctx.LoadReaderWriter(); + ctx.EmitCall(Helpers.GetStaticMethod(ctx.MapType(typeof(ProtoReader)), "ReadObject")); + ctx.CastFromObject(type); + } + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/SubItemSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/SubItemSerializer.cs.meta new file mode 100644 index 0000000..aaeac6f --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SubItemSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 6d29a14abe6c62349930c948a6d438c4 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/SurrogateSerializer.cs b/Runtime/Protobuf-net/Serializers/SurrogateSerializer.cs new file mode 100644 index 0000000..86275eb --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SurrogateSerializer.cs @@ -0,0 +1,157 @@ +#if !NO_RUNTIME +using System; +using ProtoBuf.Meta; +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + sealed class SurrogateSerializer : IProtoTypeSerializer + { + bool IProtoTypeSerializer.HasCallbacks(ProtoBuf.Meta.TypeModel.CallbackType callbackType) { return false; } +#if FEAT_COMPILER + void IProtoTypeSerializer.EmitCallback(Compiler.CompilerContext ctx, Compiler.Local valueFrom, ProtoBuf.Meta.TypeModel.CallbackType callbackType) { } + void IProtoTypeSerializer.EmitCreateInstance(Compiler.CompilerContext ctx) { throw new NotSupportedException(); } +#endif + bool IProtoTypeSerializer.CanCreateInstance() => false; + + object IProtoTypeSerializer.CreateInstance(ProtoReader source) => throw new NotSupportedException(); + + void IProtoTypeSerializer.Callback(object value, ProtoBuf.Meta.TypeModel.CallbackType callbackType, SerializationContext context) { } + + public bool ReturnsValue => false; + + public bool RequiresOldValue => true; + + public Type ExpectedType => forType; + + private readonly Type forType, declaredType; + private readonly MethodInfo toTail, fromTail; + IProtoTypeSerializer rootTail; + + public SurrogateSerializer(TypeModel model, Type forType, Type declaredType, IProtoTypeSerializer rootTail) + { + Helpers.DebugAssert(forType != null, "forType"); + Helpers.DebugAssert(declaredType != null, "declaredType"); + Helpers.DebugAssert(rootTail != null, "rootTail"); + Helpers.DebugAssert(rootTail.RequiresOldValue, "RequiresOldValue"); + Helpers.DebugAssert(!rootTail.ReturnsValue, "ReturnsValue"); + Helpers.DebugAssert(declaredType == rootTail.ExpectedType || Helpers.IsSubclassOf(declaredType, rootTail.ExpectedType)); + this.forType = forType; + this.declaredType = declaredType; + this.rootTail = rootTail; + toTail = GetConversion(model, true); + fromTail = GetConversion(model, false); + } + private static bool HasCast(TypeModel model, Type type, Type from, Type to, out MethodInfo op) + { +#if PROFILE259 + System.Collections.Generic.List<MethodInfo> list = new System.Collections.Generic.List<MethodInfo>(); + foreach 
(var item in type.GetRuntimeMethods()) + { + if (item.IsStatic) list.Add(item); + } + MethodInfo[] found = list.ToArray(); +#else + const BindingFlags flags = BindingFlags.Static | BindingFlags.Public | BindingFlags.NonPublic; + MethodInfo[] found = type.GetMethods(flags); +#endif + ParameterInfo[] paramTypes; + Type convertAttributeType = null; + for (int i = 0; i < found.Length; i++) + { + MethodInfo m = found[i]; + if (m.ReturnType != to) continue; + paramTypes = m.GetParameters(); + if (paramTypes.Length == 1 && paramTypes[0].ParameterType == from) + { + if (convertAttributeType == null) + { + convertAttributeType = model.MapType(typeof(ProtoConverterAttribute), false); + if (convertAttributeType == null) + { // attribute isn't defined in the source assembly: stop looking + break; + } + } + if (m.IsDefined(convertAttributeType, true)) + { + op = m; + return true; + } + } + } + + for (int i = 0; i < found.Length; i++) + { + MethodInfo m = found[i]; + if ((m.Name != "op_Implicit" && m.Name != "op_Explicit") || m.ReturnType != to) + { + continue; + } + paramTypes = m.GetParameters(); + if (paramTypes.Length == 1 && paramTypes[0].ParameterType == from) + { + op = m; + return true; + } + } + op = null; + return false; + } + + public MethodInfo GetConversion(TypeModel model, bool toTail) + { + Type to = toTail ? declaredType : forType; + Type from = toTail ? forType : declaredType; + MethodInfo op; + if (HasCast(model, declaredType, from, to, out op) || HasCast(model, forType, from, to, out op)) + { + return op; + } + throw new InvalidOperationException("No suitable conversion operator found for surrogate: " + + forType.FullName + " / " + declaredType.FullName); + } + + public void Write(object value, ProtoWriter writer) + { + rootTail.Write(toTail.Invoke(null, new object[] { value }), writer); + } + + public object Read(object value, ProtoReader source) + { + // convert the incoming value + object[] args = { value }; + value = toTail.Invoke(null, args); + + // invoke the tail and convert the outgoing value + args[0] = rootTail.Read(value, source); + return fromTail.Invoke(null, args); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + Helpers.DebugAssert(valueFrom != null); // don't support stack-head for this + using (Compiler.Local converted = new Compiler.Local(ctx, declaredType)) // declare/re-use local + { + ctx.LoadValue(valueFrom); // load primary onto stack + ctx.EmitCall(toTail); // static convert op, primary-to-surrogate + ctx.StoreValue(converted); // store into surrogate local + + rootTail.EmitRead(ctx, converted); // downstream processing against surrogate local + + ctx.LoadValue(converted); // load from surrogate local + ctx.EmitCall(fromTail); // static convert op, surrogate-to-primary + ctx.StoreValue(valueFrom); // store back into primary + } + } + + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.LoadValue(valueFrom); + ctx.EmitCall(toTail); + rootTail.EmitWrite(ctx, null); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/SurrogateSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/SurrogateSerializer.cs.meta new file mode 100644 index 0000000..39cf419 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SurrogateSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 517911a690de2bd4c97e81f35104a81a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + 
executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/SystemTypeSerializer.cs b/Runtime/Protobuf-net/Serializers/SystemTypeSerializer.cs new file mode 100644 index 0000000..4b1656d --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SystemTypeSerializer.cs @@ -0,0 +1,46 @@ +using System; + +#if !NO_RUNTIME + +namespace ProtoBuf.Serializers +{ + sealed class SystemTypeSerializer : IProtoSerializer + { + static readonly Type expectedType = typeof(Type); + + public SystemTypeSerializer(ProtoBuf.Meta.TypeModel model) + { + + } + + public Type ExpectedType => expectedType; + + void IProtoSerializer.Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteType((Type)value, dest); + } + + object IProtoSerializer.Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadType(); + } + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteType", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadType", ExpectedType); + } +#endif + } +} + +#endif diff --git a/Runtime/Protobuf-net/Serializers/SystemTypeSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/SystemTypeSerializer.cs.meta new file mode 100644 index 0000000..4f43a1f --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/SystemTypeSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: f3b3869287521554a816dba994928f83 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/TagDecorator.cs b/Runtime/Protobuf-net/Serializers/TagDecorator.cs new file mode 100644 index 0000000..509b8a0 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/TagDecorator.cs @@ -0,0 +1,108 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; + +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class TagDecorator : ProtoDecoratorBase, IProtoTypeSerializer + { + public bool HasCallbacks(TypeModel.CallbackType callbackType) + { + IProtoTypeSerializer pts = Tail as IProtoTypeSerializer; + return pts != null && pts.HasCallbacks(callbackType); + } + + public bool CanCreateInstance() + { + IProtoTypeSerializer pts = Tail as IProtoTypeSerializer; + return pts != null && pts.CanCreateInstance(); + } + + public object CreateInstance(ProtoReader source) + { + return ((IProtoTypeSerializer)Tail).CreateInstance(source); + } + + public void Callback(object value, TypeModel.CallbackType callbackType, SerializationContext context) + { + if (Tail is IProtoTypeSerializer pts) + { + pts.Callback(value, callbackType, context); + } + } + +#if FEAT_COMPILER + public void EmitCallback(Compiler.CompilerContext ctx, Compiler.Local valueFrom, TypeModel.CallbackType callbackType) + { + // we only expect this to be invoked if HasCallbacks returned true, so implicitly Tail + // **must** be of the correct type + ((IProtoTypeSerializer)Tail).EmitCallback(ctx, valueFrom, callbackType); + } + + public void EmitCreateInstance(Compiler.CompilerContext ctx) + { + ((IProtoTypeSerializer)Tail).EmitCreateInstance(ctx); + } +#endif + public override Type ExpectedType => Tail.ExpectedType; + + 
public TagDecorator(int fieldNumber, WireType wireType, bool strict, IProtoSerializer tail) + : base(tail) + { + this.fieldNumber = fieldNumber; + this.wireType = wireType; + this.strict = strict; + } + + public override bool RequiresOldValue => Tail.RequiresOldValue; + + public override bool ReturnsValue => Tail.ReturnsValue; + + private readonly bool strict; + private readonly int fieldNumber; + private readonly WireType wireType; + + private bool NeedsHint => ((int)wireType & ~7) != 0; + + public override object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(fieldNumber == source.FieldNumber); + if (strict) { source.Assert(wireType); } + else if (NeedsHint) { source.Hint(wireType); } + return Tail.Read(value, source); + } + + public override void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteFieldHeader(fieldNumber, wireType, dest); + Tail.Write(value, dest); + } + + +#if FEAT_COMPILER + protected override void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.LoadValue((int)fieldNumber); + ctx.LoadValue((int)wireType); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("WriteFieldHeader")); + Tail.EmitWrite(ctx, valueFrom); + } + + protected override void EmitRead(ProtoBuf.Compiler.CompilerContext ctx, ProtoBuf.Compiler.Local valueFrom) + { + if (strict || NeedsHint) + { + ctx.LoadReaderWriter(); + ctx.LoadValue((int)wireType); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod(strict ? "Assert" : "Hint")); + } + Tail.EmitRead(ctx, valueFrom); + } +#endif + } + +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/TagDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/TagDecorator.cs.meta new file mode 100644 index 0000000..e522eda --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/TagDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: f807f0cbd3358f6479b78ab47c89ad48 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/TimeSpanSerializer.cs b/Runtime/Protobuf-net/Serializers/TimeSpanSerializer.cs new file mode 100644 index 0000000..4c8b828 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/TimeSpanSerializer.cs @@ -0,0 +1,63 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class TimeSpanSerializer : IProtoSerializer + { + static readonly Type expectedType = typeof(TimeSpan); + private readonly bool wellKnown; + public TimeSpanSerializer(DataFormat dataFormat, ProtoBuf.Meta.TypeModel model) + { + + wellKnown = dataFormat == DataFormat.WellKnown; + } + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + if (wellKnown) + { + return BclHelpers.ReadDuration(source); + } + else + { + Helpers.DebugAssert(value == null); // since replaces + return BclHelpers.ReadTimeSpan(source); + } + } + + public void Write(object value, ProtoWriter dest) + { + if (wellKnown) + { + BclHelpers.WriteDuration((TimeSpan)value, dest); + } + else + { + BclHelpers.WriteTimeSpan((TimeSpan)value, dest); + } + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitWrite(ctx.MapType(typeof(BclHelpers)), + wellKnown ? 
nameof(BclHelpers.WriteDuration) : nameof(BclHelpers.WriteTimeSpan), valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + if (wellKnown) ctx.LoadValue(valueFrom); + ctx.EmitBasicRead(ctx.MapType(typeof(BclHelpers)), + wellKnown ? nameof(BclHelpers.ReadDuration) : nameof(BclHelpers.ReadTimeSpan), + ExpectedType); + } +#endif + + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/TimeSpanSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/TimeSpanSerializer.cs.meta new file mode 100644 index 0000000..013bd6e --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/TimeSpanSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 88ce40638421a9d4abd295d84d1991e8 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/TupleSerializer.cs b/Runtime/Protobuf-net/Serializers/TupleSerializer.cs new file mode 100644 index 0000000..b6f9c69 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/TupleSerializer.cs @@ -0,0 +1,339 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; +using ProtoBuf.Meta; + +namespace ProtoBuf.Serializers +{ + sealed class TupleSerializer : IProtoTypeSerializer + { + private readonly MemberInfo[] members; + private readonly ConstructorInfo ctor; + private IProtoSerializer[] tails; + public TupleSerializer(RuntimeTypeModel model, ConstructorInfo ctor, MemberInfo[] members) + { + this.ctor = ctor ?? throw new ArgumentNullException(nameof(ctor)); + this.members = members ?? throw new ArgumentNullException(nameof(members)); + this.tails = new IProtoSerializer[members.Length]; + + ParameterInfo[] parameters = ctor.GetParameters(); + for (int i = 0; i < members.Length; i++) + { + WireType wireType; + Type finalType = parameters[i].ParameterType; + + Type itemType = null, defaultType = null; + + MetaType.ResolveListTypes(model, finalType, ref itemType, ref defaultType); + Type tmp = itemType == null ? 
finalType : itemType; + + bool asReference = false; + int typeIndex = model.FindOrAddAuto(tmp, false, true, false); + if (typeIndex >= 0) + { + asReference = model[tmp].AsReferenceDefault; + } + IProtoSerializer tail = ValueMember.TryGetCoreSerializer(model, DataFormat.Default, tmp, out wireType, asReference, false, false, true), serializer; + if (tail == null) + { + throw new InvalidOperationException("No serializer defined for type: " + tmp.FullName); + } + + tail = new TagDecorator(i + 1, wireType, false, tail); + if (itemType == null) + { + serializer = tail; + } + else + { + if (finalType.IsArray) + { + serializer = new ArrayDecorator(model, tail, i + 1, false, wireType, finalType, false, false); + } + else + { + serializer = ListDecorator.Create(model, finalType, defaultType, tail, i + 1, false, wireType, true, false, false); + } + } + tails[i] = serializer; + } + } + public bool HasCallbacks(Meta.TypeModel.CallbackType callbackType) + { + return false; + } + +#if FEAT_COMPILER + public void EmitCallback(Compiler.CompilerContext ctx, Compiler.Local valueFrom, Meta.TypeModel.CallbackType callbackType) { } +#endif + public Type ExpectedType => ctor.DeclaringType; + + void IProtoTypeSerializer.Callback(object value, Meta.TypeModel.CallbackType callbackType, SerializationContext context) { } + object IProtoTypeSerializer.CreateInstance(ProtoReader source) { throw new NotSupportedException(); } + private object GetValue(object obj, int index) + { + PropertyInfo prop; + FieldInfo field; + + if ((prop = members[index] as PropertyInfo) != null) + { + if (obj == null) + return Helpers.IsValueType(prop.PropertyType) ? Activator.CreateInstance(prop.PropertyType) : null; + return prop.GetValue(obj, null); + } + else if ((field = members[index] as FieldInfo) != null) + { + if (obj == null) + return Helpers.IsValueType(field.FieldType) ? Activator.CreateInstance(field.FieldType) : null; + return field.GetValue(obj); + } + else + { + throw new InvalidOperationException(); + } + } + + public object Read(object value, ProtoReader source) + { + object[] values = new object[members.Length]; + bool invokeCtor = false; + if (value == null) + { + invokeCtor = true; + } + for (int i = 0; i < values.Length; i++) + values[i] = GetValue(value, i); + int field; + while ((field = source.ReadFieldHeader()) > 0) + { + invokeCtor = true; + if (field <= tails.Length) + { + IProtoSerializer tail = tails[field - 1]; + values[field - 1] = tails[field - 1].Read(tail.RequiresOldValue ? values[field - 1] : null, source); + } + else + { + source.SkipField(); + } + } + return invokeCtor ? 
ctor.Invoke(values) : value; + } + + public void Write(object value, ProtoWriter dest) + { + for (int i = 0; i < tails.Length; i++) + { + object val = GetValue(value, i); + if (val != null) tails[i].Write(val, dest); + } + } + + public bool RequiresOldValue => true; + + public bool ReturnsValue => false; + + Type GetMemberType(int index) + { + Type result = Helpers.GetMemberType(members[index]); + if (result == null) throw new InvalidOperationException(); + return result; + } + + bool IProtoTypeSerializer.CanCreateInstance() { return false; } + +#if FEAT_COMPILER + public void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + using (Compiler.Local loc = ctx.GetLocalWithValue(ctor.DeclaringType, valueFrom)) + { + for (int i = 0; i < tails.Length; i++) + { + Type type = GetMemberType(i); + ctx.LoadAddress(loc, ExpectedType); + if (members[i] is FieldInfo) + { + ctx.LoadValue((FieldInfo)members[i]); + } + else if (members[i] is PropertyInfo) + { + ctx.LoadValue((PropertyInfo)members[i]); + } + ctx.WriteNullCheckedTail(type, tails[i], null); + } + } + } + + void IProtoTypeSerializer.EmitCreateInstance(Compiler.CompilerContext ctx) { throw new NotSupportedException(); } + + public void EmitRead(Compiler.CompilerContext ctx, Compiler.Local incoming) + { + using (Compiler.Local objValue = ctx.GetLocalWithValue(ExpectedType, incoming)) + { + Compiler.Local[] locals = new Compiler.Local[members.Length]; + try + { + for (int i = 0; i < locals.Length; i++) + { + Type type = GetMemberType(i); + bool store = true; + locals[i] = new Compiler.Local(ctx, type); + if (!Helpers.IsValueType(ExpectedType)) + { + // value-types always read the old value + if (Helpers.IsValueType(type)) + { + switch (Helpers.GetTypeCode(type)) + { + case ProtoTypeCode.Boolean: + case ProtoTypeCode.Byte: + case ProtoTypeCode.Int16: + case ProtoTypeCode.Int32: + case ProtoTypeCode.SByte: + case ProtoTypeCode.UInt16: + case ProtoTypeCode.UInt32: + ctx.LoadValue(0); + break; + case ProtoTypeCode.Int64: + case ProtoTypeCode.UInt64: + ctx.LoadValue(0L); + break; + case ProtoTypeCode.Single: + ctx.LoadValue(0.0F); + break; + case ProtoTypeCode.Double: + ctx.LoadValue(0.0D); + break; + case ProtoTypeCode.Decimal: + ctx.LoadValue(0M); + break; + case ProtoTypeCode.Guid: + ctx.LoadValue(Guid.Empty); + break; + default: + ctx.LoadAddress(locals[i], type); + ctx.EmitCtor(type); + store = false; + break; + } + } + else + { + ctx.LoadNullRef(); + } + if (store) + { + ctx.StoreValue(locals[i]); + } + } + } + + Compiler.CodeLabel skipOld = Helpers.IsValueType(ExpectedType) + ? 
new Compiler.CodeLabel() + : ctx.DefineLabel(); + if (!Helpers.IsValueType(ExpectedType)) + { + ctx.LoadAddress(objValue, ExpectedType); + ctx.BranchIfFalse(skipOld, false); + } + for (int i = 0; i < members.Length; i++) + { + ctx.LoadAddress(objValue, ExpectedType); + if (members[i] is FieldInfo) + { + ctx.LoadValue((FieldInfo)members[i]); + } + else if (members[i] is PropertyInfo) + { + ctx.LoadValue((PropertyInfo)members[i]); + } + ctx.StoreValue(locals[i]); + } + + if (!Helpers.IsValueType(ExpectedType)) ctx.MarkLabel(skipOld); + + using (Compiler.Local fieldNumber = new Compiler.Local(ctx, ctx.MapType(typeof(int)))) + { + Compiler.CodeLabel @continue = ctx.DefineLabel(), + processField = ctx.DefineLabel(), + notRecognised = ctx.DefineLabel(); + ctx.Branch(@continue, false); + + Compiler.CodeLabel[] handlers = new Compiler.CodeLabel[members.Length]; + for (int i = 0; i < members.Length; i++) + { + handlers[i] = ctx.DefineLabel(); + } + + ctx.MarkLabel(processField); + + ctx.LoadValue(fieldNumber); + ctx.LoadValue(1); + ctx.Subtract(); // jump-table is zero-based + ctx.Switch(handlers); + + // and the default: + ctx.Branch(notRecognised, false); + for (int i = 0; i < handlers.Length; i++) + { + ctx.MarkLabel(handlers[i]); + IProtoSerializer tail = tails[i]; + Compiler.Local oldValIfNeeded = tail.RequiresOldValue ? locals[i] : null; + ctx.ReadNullCheckedTail(locals[i].Type, tail, oldValIfNeeded); + if (tail.ReturnsValue) + { + if (Helpers.IsValueType(locals[i].Type)) + { + ctx.StoreValue(locals[i]); + } + else + { + Compiler.CodeLabel hasValue = ctx.DefineLabel(), allDone = ctx.DefineLabel(); + + ctx.CopyValue(); + ctx.BranchIfTrue(hasValue, true); // interpret null as "don't assign" + ctx.DiscardValue(); + ctx.Branch(allDone, true); + ctx.MarkLabel(hasValue); + ctx.StoreValue(locals[i]); + ctx.MarkLabel(allDone); + } + } + ctx.Branch(@continue, false); + } + + ctx.MarkLabel(notRecognised); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("SkipField")); + + ctx.MarkLabel(@continue); + ctx.EmitBasicRead("ReadFieldHeader", ctx.MapType(typeof(int))); + ctx.CopyValue(); + ctx.StoreValue(fieldNumber); + ctx.LoadValue(0); + ctx.BranchIfGreater(processField, false); + } + for (int i = 0; i < locals.Length; i++) + { + ctx.LoadValue(locals[i]); + } + + ctx.EmitCtor(ctor); + ctx.StoreValue(objValue); + } + finally + { + for (int i = 0; i < locals.Length; i++) + { + if (locals[i] != null) + locals[i].Dispose(); // release for re-use + } + } + } + + } +#endif + } +} + +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/TupleSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/TupleSerializer.cs.meta new file mode 100644 index 0000000..df99bbe --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/TupleSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 7e57d0cd813299f40a1a32236d9931a9 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/TypeSerializer.cs b/Runtime/Protobuf-net/Serializers/TypeSerializer.cs new file mode 100644 index 0000000..d851b47 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/TypeSerializer.cs @@ -0,0 +1,798 @@ +#if !NO_RUNTIME +using System; +using ProtoBuf.Meta; +#if FEAT_COMPILER + +#endif + +using System.Reflection; + +namespace ProtoBuf.Serializers +{ + sealed class TypeSerializer : IProtoTypeSerializer + { + public bool 
HasCallbacks(TypeModel.CallbackType callbackType) + { + if (callbacks != null && callbacks[callbackType] != null) return true; + for (int i = 0; i < serializers.Length; i++) + { + if (serializers[i].ExpectedType != forType && ((IProtoTypeSerializer)serializers[i]).HasCallbacks(callbackType)) return true; + } + return false; + } + private readonly Type forType, constructType; +#if COREFX || PROFILE259 + private readonly TypeInfo typeInfo; +#endif + public Type ExpectedType { get { return forType; } } + private readonly IProtoSerializer[] serializers; + private readonly int[] fieldNumbers; + private readonly bool isRootType, useConstructor, isExtensible, hasConstructor; + private readonly CallbackSet callbacks; + private readonly MethodInfo[] baseCtorCallbacks; + private readonly MethodInfo factory; + public TypeSerializer(TypeModel model, Type forType, int[] fieldNumbers, IProtoSerializer[] serializers, MethodInfo[] baseCtorCallbacks, bool isRootType, bool useConstructor, CallbackSet callbacks, Type constructType, MethodInfo factory) + { + Helpers.DebugAssert(forType != null); + Helpers.DebugAssert(fieldNumbers != null); + Helpers.DebugAssert(serializers != null); + Helpers.DebugAssert(fieldNumbers.Length == serializers.Length); + + Helpers.Sort(fieldNumbers, serializers); + bool hasSubTypes = false; + for (int i = 0; i < fieldNumbers.Length; i++) + { + if (i != 0 && fieldNumbers[i] == fieldNumbers[i - 1]) throw new InvalidOperationException("Duplicate field-number detected; " + + fieldNumbers[i].ToString() + " on: " + forType.FullName); + if (!hasSubTypes && serializers[i].ExpectedType != forType) + { + hasSubTypes = true; + } + } + this.forType = forType; + this.factory = factory; +#if COREFX || PROFILE259 + this.typeInfo = forType.GetTypeInfo(); +#endif + if (constructType == null) + { + constructType = forType; + } + else + { +#if COREFX || PROFILE259 + if (!typeInfo.IsAssignableFrom(constructType.GetTypeInfo())) +#else + if (!forType.IsAssignableFrom(constructType)) +#endif + { + throw new InvalidOperationException(forType.FullName + " cannot be assigned from " + constructType.FullName); + } + } + this.constructType = constructType; + this.serializers = serializers; + this.fieldNumbers = fieldNumbers; + this.callbacks = callbacks; + this.isRootType = isRootType; + this.useConstructor = useConstructor; + + if (baseCtorCallbacks != null && baseCtorCallbacks.Length == 0) baseCtorCallbacks = null; + this.baseCtorCallbacks = baseCtorCallbacks; + + if (Helpers.GetUnderlyingType(forType) != null) + { + throw new ArgumentException("Cannot create a TypeSerializer for nullable types", "forType"); + } + +#if COREFX || PROFILE259 + if (iextensible.IsAssignableFrom(typeInfo)) + { + if (typeInfo.IsValueType || !isRootType || hasSubTypes) +#else + if (model.MapType(iextensible).IsAssignableFrom(forType)) + { + if (forType.IsValueType || !isRootType || hasSubTypes) +#endif + { + throw new NotSupportedException("IExtensible is not supported in structs or classes with inheritance"); + } + isExtensible = true; + } +#if COREFX || PROFILE259 + TypeInfo constructTypeInfo = constructType.GetTypeInfo(); + hasConstructor = !constructTypeInfo.IsAbstract && Helpers.GetConstructor(constructTypeInfo, Helpers.EmptyTypes, true) != null; +#else + hasConstructor = !constructType.IsAbstract && Helpers.GetConstructor(constructType, Helpers.EmptyTypes, true) != null; +#endif + if (constructType != forType && useConstructor && !hasConstructor) + { + throw new ArgumentException("The supplied default implementation cannot 
be created: " + constructType.FullName, "constructType"); + } + } +#if COREFX || PROFILE259 + private static readonly TypeInfo iextensible = typeof(IExtensible).GetTypeInfo(); +#else + private static readonly System.Type iextensible = typeof(IExtensible); +#endif + + private bool CanHaveInheritance + { + get + { +#if COREFX || PROFILE259 + return (typeInfo.IsClass || typeInfo.IsInterface) && !typeInfo.IsSealed; +#else + return (forType.IsClass || forType.IsInterface) && !forType.IsSealed; +#endif + } + } + + bool IProtoTypeSerializer.CanCreateInstance() { return true; } + + object IProtoTypeSerializer.CreateInstance(ProtoReader source) + { + return CreateInstance(source, false); + } + public void Callback(object value, TypeModel.CallbackType callbackType, SerializationContext context) + { + if (callbacks != null) InvokeCallback(callbacks[callbackType], value, context); + IProtoTypeSerializer ser = (IProtoTypeSerializer)GetMoreSpecificSerializer(value); + if (ser != null) ser.Callback(value, callbackType, context); + } + private IProtoSerializer GetMoreSpecificSerializer(object value) + { + if (!CanHaveInheritance) return null; + Type actualType = value.GetType(); + if (actualType == forType) return null; + + for (int i = 0; i < serializers.Length; i++) + { + IProtoSerializer ser = serializers[i]; + if (ser.ExpectedType != forType && Helpers.IsAssignableFrom(ser.ExpectedType, actualType)) + { + return ser; + } + } + if (actualType == constructType) return null; // needs to be last in case the default concrete type is also a known sub-type + TypeModel.ThrowUnexpectedSubtype(forType, actualType); // might throw (if not a proxy) + return null; + } + + public void Write(object value, ProtoWriter dest) + { + if (isRootType) Callback(value, TypeModel.CallbackType.BeforeSerialize, dest.Context); + // write inheritance first + IProtoSerializer next = GetMoreSpecificSerializer(value); + if (next != null) next.Write(value, dest); + + // write all actual fields + //Helpers.DebugWriteLine(">> Writing fields for " + forType.FullName); + for (int i = 0; i < serializers.Length; i++) + { + IProtoSerializer ser = serializers[i]; + if (ser.ExpectedType == forType) + { + //Helpers.DebugWriteLine(": " + ser.ToString()); + ser.Write(value, dest); + } + } + //Helpers.DebugWriteLine("<< Writing fields for " + forType.FullName); + if (isExtensible) ProtoWriter.AppendExtensionData((IExtensible)value, dest); + if (isRootType) Callback(value, TypeModel.CallbackType.AfterSerialize, dest.Context); + } + + public object Read(object value, ProtoReader source) + { + if (isRootType && value != null) { Callback(value, TypeModel.CallbackType.BeforeDeserialize, source.Context); } + int fieldNumber, lastFieldNumber = 0, lastFieldIndex = 0; + bool fieldHandled; + + //Helpers.DebugWriteLine(">> Reading fields for " + forType.FullName); + while ((fieldNumber = source.ReadFieldHeader()) > 0) + { + fieldHandled = false; + if (fieldNumber < lastFieldNumber) + { + lastFieldNumber = lastFieldIndex = 0; + } + for (int i = lastFieldIndex; i < fieldNumbers.Length; i++) + { + if (fieldNumbers[i] == fieldNumber) + { + IProtoSerializer ser = serializers[i]; + //Helpers.DebugWriteLine(": " + ser.ToString()); + Type serType = ser.ExpectedType; + if (value == null) + { + if (serType == forType) value = CreateInstance(source, true); + } + else + { + if (serType != forType && ((IProtoTypeSerializer)ser).CanCreateInstance() + && serType +#if COREFX || PROFILE259 + .GetTypeInfo() +#endif + .IsSubclassOf(value.GetType())) + { + value = 
ProtoReader.Merge(source, value, ((IProtoTypeSerializer)ser).CreateInstance(source)); + } + } + + if (ser.ReturnsValue) + { + value = ser.Read(value, source); + } + else + { // pop + ser.Read(value, source); + } + + lastFieldIndex = i; + lastFieldNumber = fieldNumber; + fieldHandled = true; + break; + } + } + if (!fieldHandled) + { + //Helpers.DebugWriteLine(": [" + fieldNumber + "] (unknown)"); + if (value == null) value = CreateInstance(source, true); + if (isExtensible) + { + source.AppendExtensionData((IExtensible)value); + } + else + { + source.SkipField(); + } + } + } + //Helpers.DebugWriteLine("<< Reading fields for " + forType.FullName); + if (value == null) value = CreateInstance(source, true); + if (isRootType) { Callback(value, TypeModel.CallbackType.AfterDeserialize, source.Context); } + return value; + } + + private object InvokeCallback(MethodInfo method, object obj, SerializationContext context) + { + object result = null; + object[] args; + if (method != null) + { // pass in a streaming context if one is needed, else null + bool handled; + ParameterInfo[] parameters = method.GetParameters(); + switch (parameters.Length) + { + case 0: + args = null; + handled = true; + break; + default: + args = new object[parameters.Length]; + handled = true; + for (int i = 0; i < args.Length; i++) + { + object val; + Type paramType = parameters[i].ParameterType; + if (paramType == typeof(SerializationContext)) val = context; + else if (paramType == typeof(System.Type)) val = constructType; +#if PLAT_BINARYFORMATTER + else if (paramType == typeof(System.Runtime.Serialization.StreamingContext)) val = (System.Runtime.Serialization.StreamingContext)context; +#endif + else + { + val = null; + handled = false; + } + args[i] = val; + } + break; + } + if (handled) + { + result = method.Invoke(obj, args); + } + else + { + throw Meta.CallbackSet.CreateInvalidCallbackSignature(method); + } + + } + return result; + } + object CreateInstance(ProtoReader source, bool includeLocalCallback) + { + //Helpers.DebugWriteLine("* creating : " + forType.FullName); + object obj; + if (factory != null) + { + obj = InvokeCallback(factory, null, source.Context); + } + else if (useConstructor) + { + if (!hasConstructor) TypeModel.ThrowCannotCreateInstance(constructType); +#if PROFILE259 + ConstructorInfo constructorInfo = System.Linq.Enumerable.First( + constructType.GetTypeInfo().DeclaredConstructors, c => c.GetParameters().Length == 0); + obj = constructorInfo.Invoke(new object[] {}); + +#else + obj = Activator.CreateInstance(constructType +#if !(CF || PORTABLE || NETSTANDARD1_3 || NETSTANDARD1_4 || UAP) + , nonPublic: true +#endif + ); +#endif + } + else + { + obj = BclHelpers.GetUninitializedObject(constructType); + } + ProtoReader.NoteObject(obj, source); + if (baseCtorCallbacks != null) + { + for (int i = 0; i < baseCtorCallbacks.Length; i++) + { + InvokeCallback(baseCtorCallbacks[i], obj, source.Context); + } + } + if (includeLocalCallback && callbacks != null) InvokeCallback(callbacks.BeforeDeserialize, obj, source.Context); + return obj; + } + + bool IProtoSerializer.RequiresOldValue { get { return true; } } + bool IProtoSerializer.ReturnsValue { get { return false; } } // updates field directly +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + Type expected = ExpectedType; + using (Compiler.Local loc = ctx.GetLocalWithValue(expected, valueFrom)) + { + // pre-callbacks + EmitCallbackIfNeeded(ctx, loc, TypeModel.CallbackType.BeforeSerialize); + 
+ Compiler.CodeLabel startFields = ctx.DefineLabel(); + // inheritance + if (CanHaveInheritance) + { + for (int i = 0; i < serializers.Length; i++) + { + IProtoSerializer ser = serializers[i]; + Type serType = ser.ExpectedType; + if (serType != forType) + { + Compiler.CodeLabel ifMatch = ctx.DefineLabel(), nextTest = ctx.DefineLabel(); + ctx.LoadValue(loc); + ctx.TryCast(serType); + ctx.CopyValue(); + ctx.BranchIfTrue(ifMatch, true); + ctx.DiscardValue(); + ctx.Branch(nextTest, true); + ctx.MarkLabel(ifMatch); + if (Helpers.IsValueType(serType)) + { + ctx.DiscardValue(); + ctx.LoadValue(loc); + ctx.CastFromObject(serType); + } + ser.EmitWrite(ctx, null); + ctx.Branch(startFields, false); + ctx.MarkLabel(nextTest); + } + } + + + if (constructType != null && constructType != forType) + { + using (Compiler.Local actualType = new Compiler.Local(ctx, ctx.MapType(typeof(System.Type)))) + { + // would have jumped to "fields" if an expected sub-type, so two options: + // a: *exactly* that type, b: an *unexpected* type + ctx.LoadValue(loc); + ctx.EmitCall(ctx.MapType(typeof(object)).GetMethod("GetType")); + ctx.CopyValue(); + ctx.StoreValue(actualType); + ctx.LoadValue(forType); + ctx.BranchIfEqual(startFields, true); + + ctx.LoadValue(actualType); + ctx.LoadValue(constructType); + ctx.BranchIfEqual(startFields, true); + } + } + else + { + // would have jumped to "fields" if an expected sub-type, so two options: + // a: *exactly* that type, b: an *unexpected* type + ctx.LoadValue(loc); + ctx.EmitCall(ctx.MapType(typeof(object)).GetMethod("GetType")); + ctx.LoadValue(forType); + ctx.BranchIfEqual(startFields, true); + } + // unexpected, then... note that this *might* be a proxy, which + // is handled by ThrowUnexpectedSubtype + ctx.LoadValue(forType); + ctx.LoadValue(loc); + ctx.EmitCall(ctx.MapType(typeof(object)).GetMethod("GetType")); + ctx.EmitCall(ctx.MapType(typeof(TypeModel)).GetMethod("ThrowUnexpectedSubtype", + BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static)); + + } + // fields + + ctx.MarkLabel(startFields); + for (int i = 0; i < serializers.Length; i++) + { + IProtoSerializer ser = serializers[i]; + if (ser.ExpectedType == forType) ser.EmitWrite(ctx, loc); + } + + // extension data + if (isExtensible) + { + ctx.LoadValue(loc); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoWriter)).GetMethod("AppendExtensionData")); + } + // post-callbacks + EmitCallbackIfNeeded(ctx, loc, TypeModel.CallbackType.AfterSerialize); + } + } + static void EmitInvokeCallback(Compiler.CompilerContext ctx, MethodInfo method, bool copyValue, Type constructType, Type type) + { + if (method != null) + { + if (copyValue) ctx.CopyValue(); // assumes the target is on the stack, and that we want to *retain* it on the stack + ParameterInfo[] parameters = method.GetParameters(); + bool handled = true; + + for (int i = 0; i < parameters.Length; i++) + { + Type parameterType = parameters[i].ParameterType; + if (parameterType == ctx.MapType(typeof(SerializationContext))) + { + ctx.LoadSerializationContext(); + } + else if (parameterType == ctx.MapType(typeof(System.Type))) + { + Type tmp = constructType; + if (tmp == null) tmp = type; // no ?? 
in some C# profiles + ctx.LoadValue(tmp); + } +#if PLAT_BINARYFORMATTER + else if (parameterType == ctx.MapType(typeof(System.Runtime.Serialization.StreamingContext))) + { + ctx.LoadSerializationContext(); + MethodInfo op = ctx.MapType(typeof(SerializationContext)).GetMethod("op_Implicit", new Type[] { ctx.MapType(typeof(SerializationContext)) }); + if (op != null) + { // it isn't always! (framework versions, etc) + ctx.EmitCall(op); + handled = true; + } + } +#endif + else + { + handled = false; + } + } + if (handled) + { + ctx.EmitCall(method); + if (constructType != null) + { + if (method.ReturnType == ctx.MapType(typeof(object))) + { + ctx.CastFromObject(type); + } + } + } + else + { + throw Meta.CallbackSet.CreateInvalidCallbackSignature(method); + } + } + } + + private void EmitCallbackIfNeeded(Compiler.CompilerContext ctx, Compiler.Local valueFrom, TypeModel.CallbackType callbackType) + { + Helpers.DebugAssert(valueFrom != null); + if (isRootType && ((IProtoTypeSerializer)this).HasCallbacks(callbackType)) + { + ((IProtoTypeSerializer)this).EmitCallback(ctx, valueFrom, callbackType); + } + } + + void IProtoTypeSerializer.EmitCallback(Compiler.CompilerContext ctx, Compiler.Local valueFrom, TypeModel.CallbackType callbackType) + { + bool actuallyHasInheritance = false; + if (CanHaveInheritance) + { + + for (int i = 0; i < serializers.Length; i++) + { + IProtoSerializer ser = serializers[i]; + if (ser.ExpectedType != forType && ((IProtoTypeSerializer)ser).HasCallbacks(callbackType)) + { + actuallyHasInheritance = true; + } + } + } + + Helpers.DebugAssert(((IProtoTypeSerializer)this).HasCallbacks(callbackType), "Shouldn't be calling this if there is nothing to do"); + MethodInfo method = callbacks?[callbackType]; + if (method == null && !actuallyHasInheritance) + { + return; + } + ctx.LoadAddress(valueFrom, ExpectedType); + EmitInvokeCallback(ctx, method, actuallyHasInheritance, null, forType); + + if (actuallyHasInheritance) + { + Compiler.CodeLabel @break = ctx.DefineLabel(); + for (int i = 0; i < serializers.Length; i++) + { + IProtoSerializer ser = serializers[i]; + IProtoTypeSerializer typeser; + Type serType = ser.ExpectedType; + if (serType != forType && + (typeser = (IProtoTypeSerializer)ser).HasCallbacks(callbackType)) + { + Compiler.CodeLabel ifMatch = ctx.DefineLabel(), nextTest = ctx.DefineLabel(); + ctx.CopyValue(); + ctx.TryCast(serType); + ctx.CopyValue(); + ctx.BranchIfTrue(ifMatch, true); + ctx.DiscardValue(); + ctx.Branch(nextTest, false); + ctx.MarkLabel(ifMatch); + typeser.EmitCallback(ctx, null, callbackType); + ctx.Branch(@break, false); + ctx.MarkLabel(nextTest); + } + } + ctx.MarkLabel(@break); + ctx.DiscardValue(); + } + } + + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + Type expected = ExpectedType; + Helpers.DebugAssert(valueFrom != null); + + using (Compiler.Local loc = ctx.GetLocalWithValue(expected, valueFrom)) + using (Compiler.Local fieldNumber = new Compiler.Local(ctx, ctx.MapType(typeof(int)))) + { + // pre-callbacks + if (HasCallbacks(TypeModel.CallbackType.BeforeDeserialize)) + { + if (Helpers.IsValueType(ExpectedType)) + { + EmitCallbackIfNeeded(ctx, loc, TypeModel.CallbackType.BeforeDeserialize); + } + else + { // could be null + Compiler.CodeLabel callbacksDone = ctx.DefineLabel(); + ctx.LoadValue(loc); + ctx.BranchIfFalse(callbacksDone, false); + EmitCallbackIfNeeded(ctx, loc, TypeModel.CallbackType.BeforeDeserialize); + ctx.MarkLabel(callbacksDone); + } + } + + Compiler.CodeLabel @continue = 
ctx.DefineLabel(), processField = ctx.DefineLabel(); + ctx.Branch(@continue, false); + + ctx.MarkLabel(processField); + foreach (BasicList.Group group in BasicList.GetContiguousGroups(fieldNumbers, serializers)) + { + Compiler.CodeLabel tryNextField = ctx.DefineLabel(); + int groupItemCount = group.Items.Count; + if (groupItemCount == 1) + { + // discreet group; use an equality test + ctx.LoadValue(fieldNumber); + ctx.LoadValue(group.First); + Compiler.CodeLabel processThisField = ctx.DefineLabel(); + ctx.BranchIfEqual(processThisField, true); + ctx.Branch(tryNextField, false); + WriteFieldHandler(ctx, expected, loc, processThisField, @continue, (IProtoSerializer)group.Items[0]); + } + else + { // implement as a jump-table-based switch + ctx.LoadValue(fieldNumber); + ctx.LoadValue(group.First); + ctx.Subtract(); // jump-tables are zero-based + Compiler.CodeLabel[] jmp = new Compiler.CodeLabel[groupItemCount]; + for (int i = 0; i < groupItemCount; i++) + { + jmp[i] = ctx.DefineLabel(); + } + ctx.Switch(jmp); + // write the default... + ctx.Branch(tryNextField, false); + for (int i = 0; i < groupItemCount; i++) + { + WriteFieldHandler(ctx, expected, loc, jmp[i], @continue, (IProtoSerializer)group.Items[i]); + } + } + ctx.MarkLabel(tryNextField); + } + + EmitCreateIfNull(ctx, loc); + ctx.LoadReaderWriter(); + if (isExtensible) + { + ctx.LoadValue(loc); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("AppendExtensionData")); + } + else + { + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("SkipField")); + } + + ctx.MarkLabel(@continue); + ctx.EmitBasicRead("ReadFieldHeader", ctx.MapType(typeof(int))); + ctx.CopyValue(); + ctx.StoreValue(fieldNumber); + ctx.LoadValue(0); + ctx.BranchIfGreater(processField, false); + + EmitCreateIfNull(ctx, loc); + // post-callbacks + EmitCallbackIfNeeded(ctx, loc, TypeModel.CallbackType.AfterDeserialize); + + if (valueFrom != null && !loc.IsSame(valueFrom)) + { + ctx.LoadValue(loc); + ctx.Cast(valueFrom.Type); + ctx.StoreValue(valueFrom); + } + } + } + + private void WriteFieldHandler( + Compiler.CompilerContext ctx, Type expected, Compiler.Local loc, + Compiler.CodeLabel handler, Compiler.CodeLabel @continue, IProtoSerializer serializer) + { + ctx.MarkLabel(handler); + Type serType = serializer.ExpectedType; + if (serType == forType) + { + EmitCreateIfNull(ctx, loc); + serializer.EmitRead(ctx, loc); + } + else + { + //RuntimeTypeModel rtm = (RuntimeTypeModel)ctx.Model; + if (((IProtoTypeSerializer)serializer).CanCreateInstance()) + { + Compiler.CodeLabel allDone = ctx.DefineLabel(); + + ctx.LoadValue(loc); + ctx.BranchIfFalse(allDone, false); // null is always ok + + ctx.LoadValue(loc); + ctx.TryCast(serType); + ctx.BranchIfTrue(allDone, false); // not null, but of the correct type + + // otherwise, need to convert it + ctx.LoadReaderWriter(); + ctx.LoadValue(loc); + ((IProtoTypeSerializer)serializer).EmitCreateInstance(ctx); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("Merge")); + ctx.Cast(expected); + ctx.StoreValue(loc); // Merge always returns a value + + // nothing needs doing + ctx.MarkLabel(allDone); + } + + if (Helpers.IsValueType(serType)) + { + Compiler.CodeLabel initValue = ctx.DefineLabel(); + Compiler.CodeLabel hasValue = ctx.DefineLabel(); + using (Compiler.Local emptyValue = new Compiler.Local(ctx, serType)) + { + ctx.LoadValue(loc); + ctx.BranchIfFalse(initValue, false); + + ctx.LoadValue(loc); + ctx.CastFromObject(serType); + ctx.Branch(hasValue, false); + + ctx.MarkLabel(initValue); + ctx.InitLocal(serType, 
emptyValue); + ctx.LoadValue(emptyValue); + + ctx.MarkLabel(hasValue); + } + } + else + { + ctx.LoadValue(loc); + ctx.Cast(serType); + } + + serializer.EmitRead(ctx, null); + + } + + if (serializer.ReturnsValue) + { // update the variable + if (Helpers.IsValueType(serType)) + { + // but box it first in case of value type + ctx.CastToObject(serType); + } + ctx.StoreValue(loc); + } + ctx.Branch(@continue, false); // "continue" + } + + void IProtoTypeSerializer.EmitCreateInstance(Compiler.CompilerContext ctx) + { + // different ways of creating a new instance + bool callNoteObject = true; + if (factory != null) + { + EmitInvokeCallback(ctx, factory, false, constructType, forType); + } + else if (!useConstructor) + { // DataContractSerializer style + ctx.LoadValue(constructType); + ctx.EmitCall(ctx.MapType(typeof(BclHelpers)).GetMethod("GetUninitializedObject")); + ctx.Cast(forType); + } + else if (Helpers.IsClass(constructType) && hasConstructor) + { // XmlSerializer style + ctx.EmitCtor(constructType); + } + else + { + ctx.LoadValue(ExpectedType); + ctx.EmitCall(ctx.MapType(typeof(TypeModel)).GetMethod("ThrowCannotCreateInstance", + BindingFlags.Static | BindingFlags.Public)); + ctx.LoadNullRef(); + callNoteObject = false; + } + if (callNoteObject) + { + // track root object creation + ctx.CopyValue(); + ctx.LoadReaderWriter(); + ctx.EmitCall(ctx.MapType(typeof(ProtoReader)).GetMethod("NoteObject", + BindingFlags.Static | BindingFlags.Public)); + } + if (baseCtorCallbacks != null) + { + for (int i = 0; i < baseCtorCallbacks.Length; i++) + { + EmitInvokeCallback(ctx, baseCtorCallbacks[i], true, null, forType); + } + } + } + private void EmitCreateIfNull(Compiler.CompilerContext ctx, Compiler.Local storage) + { + Helpers.DebugAssert(storage != null); + if (!Helpers.IsValueType(ExpectedType)) + { + Compiler.CodeLabel afterNullCheck = ctx.DefineLabel(); + ctx.LoadValue(storage); + ctx.BranchIfTrue(afterNullCheck, false); + + ((IProtoTypeSerializer)this).EmitCreateInstance(ctx); + + if (callbacks != null) EmitInvokeCallback(ctx, callbacks.BeforeDeserialize, true, null, forType); + ctx.StoreValue(storage); + ctx.MarkLabel(afterNullCheck); + } + } +#endif + } + +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/TypeSerializer.cs.meta b/Runtime/Protobuf-net/Serializers/TypeSerializer.cs.meta new file mode 100644 index 0000000..0b9bc49 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/TypeSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b3f577c98285d56469b3eb1c9190e174 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/UInt16Serializer.cs b/Runtime/Protobuf-net/Serializers/UInt16Serializer.cs new file mode 100644 index 0000000..ff9f89b --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/UInt16Serializer.cs @@ -0,0 +1,43 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + class UInt16Serializer : IProtoSerializer + { + static readonly Type expectedType = typeof(ushort); + + public UInt16Serializer(ProtoBuf.Meta.TypeModel model) + { + } + + public virtual Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public virtual object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadUInt16(); + } + + public 
virtual void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteUInt16((ushort)value, dest); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteUInt16", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadUInt16", ctx.MapType(typeof(ushort))); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/UInt16Serializer.cs.meta b/Runtime/Protobuf-net/Serializers/UInt16Serializer.cs.meta new file mode 100644 index 0000000..3e94120 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/UInt16Serializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 95fff5b2239c48c4cbb32346fee1be94 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/UInt32Serializer.cs b/Runtime/Protobuf-net/Serializers/UInt32Serializer.cs new file mode 100644 index 0000000..08b4f4b --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/UInt32Serializer.cs @@ -0,0 +1,43 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class UInt32Serializer : IProtoSerializer + { + static readonly Type expectedType = typeof(uint); + + public UInt32Serializer(ProtoBuf.Meta.TypeModel model) + { + + } + + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since replaces + return source.ReadUInt32(); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteUInt32((uint)value, dest); + } +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteUInt32", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadUInt32", ctx.MapType(typeof(uint))); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/UInt32Serializer.cs.meta b/Runtime/Protobuf-net/Serializers/UInt32Serializer.cs.meta new file mode 100644 index 0000000..342cdec --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/UInt32Serializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 79149f5f69e868c45a17d389322e4bb7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/UInt64Serializer.cs b/Runtime/Protobuf-net/Serializers/UInt64Serializer.cs new file mode 100644 index 0000000..8577edd --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/UInt64Serializer.cs @@ -0,0 +1,43 @@ +#if !NO_RUNTIME +using System; + +namespace ProtoBuf.Serializers +{ + sealed class UInt64Serializer : IProtoSerializer + { + static readonly Type expectedType = typeof(ulong); + + public UInt64Serializer(ProtoBuf.Meta.TypeModel model) + { + + } + public Type ExpectedType => expectedType; + + bool IProtoSerializer.RequiresOldValue => false; + + bool IProtoSerializer.ReturnsValue => true; + + public object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // since 
replaces + return source.ReadUInt64(); + } + + public void Write(object value, ProtoWriter dest) + { + ProtoWriter.WriteUInt64((ulong)value, dest); + } + +#if FEAT_COMPILER + void IProtoSerializer.EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicWrite("WriteUInt64", valueFrom); + } + void IProtoSerializer.EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.EmitBasicRead("ReadUInt64", ExpectedType); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/UInt64Serializer.cs.meta b/Runtime/Protobuf-net/Serializers/UInt64Serializer.cs.meta new file mode 100644 index 0000000..72452d9 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/UInt64Serializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: e549c20b3409c4a4dbf0e7fc25062c71 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/Serializers/UriDecorator.cs b/Runtime/Protobuf-net/Serializers/UriDecorator.cs new file mode 100644 index 0000000..d34ac2d --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/UriDecorator.cs @@ -0,0 +1,62 @@ +#if !NO_RUNTIME +using System; +using System.Reflection; + +#if FEAT_COMPILER +using ProtoBuf.Compiler; +#endif + +namespace ProtoBuf.Serializers +{ + sealed class UriDecorator : ProtoDecoratorBase + { + static readonly Type expectedType = typeof(Uri); + public UriDecorator(ProtoBuf.Meta.TypeModel model, IProtoSerializer tail) : base(tail) + { + + } + + public override Type ExpectedType => expectedType; + + public override bool RequiresOldValue => false; + + public override bool ReturnsValue => true; + + public override void Write(object value, ProtoWriter dest) + { + Tail.Write(((Uri)value).OriginalString, dest); + } + + public override object Read(object value, ProtoReader source) + { + Helpers.DebugAssert(value == null); // not expecting incoming + string s = (string)Tail.Read(null, source); + return s.Length == 0 ? 
null : new Uri(s, UriKind.RelativeOrAbsolute); + } + +#if FEAT_COMPILER + protected override void EmitWrite(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + ctx.LoadValue(valueFrom); + ctx.LoadValue(typeof(Uri).GetProperty("OriginalString")); + Tail.EmitWrite(ctx, null); + } + protected override void EmitRead(Compiler.CompilerContext ctx, Compiler.Local valueFrom) + { + Tail.EmitRead(ctx, valueFrom); + ctx.CopyValue(); + Compiler.CodeLabel @nonEmpty = ctx.DefineLabel(), @end = ctx.DefineLabel(); + ctx.LoadValue(typeof(string).GetProperty("Length")); + ctx.BranchIfTrue(@nonEmpty, true); + ctx.DiscardValue(); + ctx.LoadNullRef(); + ctx.Branch(@end, true); + ctx.MarkLabel(@nonEmpty); + ctx.LoadValue((int)UriKind.RelativeOrAbsolute); + ctx.EmitCtor(ctx.MapType(typeof(Uri)), ctx.MapType(typeof(string)), ctx.MapType(typeof(UriKind))); + ctx.MarkLabel(@end); + } +#endif + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/Serializers/UriDecorator.cs.meta b/Runtime/Protobuf-net/Serializers/UriDecorator.cs.meta new file mode 100644 index 0000000..0095ee9 --- /dev/null +++ b/Runtime/Protobuf-net/Serializers/UriDecorator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b784c432eb5cbf742b3d96161e7c8d73 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ServiceModel.meta b/Runtime/Protobuf-net/ServiceModel.meta new file mode 100644 index 0000000..36e1d95 --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: af34a7ba57dbd6340b8d3fa0bfdbd0a1 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorAttribute.cs b/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorAttribute.cs new file mode 100644 index 0000000..928207e --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorAttribute.cs @@ -0,0 +1,35 @@ +#if FEAT_SERVICEMODEL && PLAT_XMLSERIALIZER +using System; +using System.ServiceModel.Channels; +using System.ServiceModel.Description; +using System.ServiceModel.Dispatcher; + +namespace ProtoBuf.ServiceModel +{ + /// + /// Uses protocol buffer serialization on the specified operation; note that this + /// must be enabled on both the client and server. 
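+ /// A minimal usage sketch (the contract, operation and message names here are hypothetical):
+ /// decorate the operation on the shared service contract so that both the client proxy and the
+ /// service dispatcher pick up the protobuf serializer, e.g.
+ ///     [OperationContract]
+ ///     [ProtoBehavior]
+ ///     SearchResponse Search(SearchRequest request);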
+ /// + [AttributeUsage(AttributeTargets.Method, AllowMultiple = false)] + public sealed class ProtoBehaviorAttribute : Attribute, IOperationBehavior + { + void IOperationBehavior.AddBindingParameters(OperationDescription operationDescription, BindingParameterCollection bindingParameters) + { } + + void IOperationBehavior.ApplyClientBehavior(OperationDescription operationDescription, ClientOperation clientOperation) + { + IOperationBehavior innerBehavior = new ProtoOperationBehavior(operationDescription); + innerBehavior.ApplyClientBehavior(operationDescription, clientOperation); + } + + void IOperationBehavior.ApplyDispatchBehavior(OperationDescription operationDescription, DispatchOperation dispatchOperation) + { + IOperationBehavior innerBehavior = new ProtoOperationBehavior(operationDescription); + innerBehavior.ApplyDispatchBehavior(operationDescription, dispatchOperation); + } + + void IOperationBehavior.Validate(OperationDescription operationDescription) + { } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorAttribute.cs.meta b/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorAttribute.cs.meta new file mode 100644 index 0000000..1facc70 --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorAttribute.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: feda16667cbcb8248951368dfbfef6b9 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorExtensionElement.cs b/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorExtensionElement.cs new file mode 100644 index 0000000..56edb79 --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorExtensionElement.cs @@ -0,0 +1,29 @@ +#if FEAT_SERVICEMODEL && PLAT_XMLSERIALIZER && FEAT_SERVICECONFIGMODEL +using System; +using System.ServiceModel.Configuration; + +namespace ProtoBuf.ServiceModel +{ + /// + /// Configuration element to swap out DatatContractSerilaizer with the XmlProtoSerializer for a given endpoint. + /// + /// + public class ProtoBehaviorExtension : BehaviorExtensionElement + { + /// + /// Creates a new ProtoBehaviorExtension instance. + /// + public ProtoBehaviorExtension() { } + /// + /// Gets the type of behavior. + /// + public override Type BehaviorType => typeof(ProtoEndpointBehavior); + + /// + /// Creates a behavior extension based on the current configuration settings. + /// + /// The behavior extension. 
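+ /// A rough app.config sketch (the behavior name, extension name and assembly name are assumptions):
+ ///     <system.serviceModel>
+ ///       <extensions><behaviorExtensions>
+ ///         <add name="protobuf" type="ProtoBuf.ServiceModel.ProtoBehaviorExtension, protobuf-net"/>
+ ///       </behaviorExtensions></extensions>
+ ///       <behaviors><endpointBehaviors>
+ ///         <behavior name="protoEndpoint"><protobuf/></behavior>
+ ///       </endpointBehaviors></behaviors>
+ ///     </system.serviceModel>
+ /// Endpoints that reference behaviorConfiguration="protoEndpoint" then resolve to ProtoEndpointBehavior.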
+ protected override object CreateBehavior() => new ProtoEndpointBehavior(); + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorExtensionElement.cs.meta b/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorExtensionElement.cs.meta new file mode 100644 index 0000000..7850781 --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/ProtoBehaviorExtensionElement.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: c70aaa3829dd1fa45b0530efc37727f5 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ServiceModel/ProtoEndpointBehavior.cs b/Runtime/Protobuf-net/ServiceModel/ProtoEndpointBehavior.cs new file mode 100644 index 0000000..9bcfb99 --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/ProtoEndpointBehavior.cs @@ -0,0 +1,82 @@ +#if FEAT_SERVICEMODEL && PLAT_XMLSERIALIZER +using System.ServiceModel.Description; + +namespace ProtoBuf.ServiceModel +{ + /// + /// Behavior to swap out DatatContractSerilaizer with the XmlProtoSerializer for a given endpoint. + /// + /// Add the following to the server and client app.config in the system.serviceModel section: + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// + /// Configure your endpoints to have a behaviorConfiguration as follows: + /// + /// + /// + /// + /// + /// + /// + /// + /// + public class ProtoEndpointBehavior : IEndpointBehavior + { + #region IEndpointBehavior Members + + void IEndpointBehavior.AddBindingParameters(ServiceEndpoint endpoint, System.ServiceModel.Channels.BindingParameterCollection bindingParameters) + { + } + + void IEndpointBehavior.ApplyClientBehavior(ServiceEndpoint endpoint, System.ServiceModel.Dispatcher.ClientRuntime clientRuntime) + { + ReplaceDataContractSerializerOperationBehavior(endpoint); + } + + void IEndpointBehavior.ApplyDispatchBehavior(ServiceEndpoint endpoint, System.ServiceModel.Dispatcher.EndpointDispatcher endpointDispatcher) + { + ReplaceDataContractSerializerOperationBehavior(endpoint); + } + + void IEndpointBehavior.Validate(ServiceEndpoint endpoint) + { + } + + private static void ReplaceDataContractSerializerOperationBehavior(ServiceEndpoint serviceEndpoint) + { + foreach (OperationDescription operationDescription in serviceEndpoint.Contract.Operations) + { + ReplaceDataContractSerializerOperationBehavior(operationDescription); + } + } + + private static void ReplaceDataContractSerializerOperationBehavior(OperationDescription description) + { + DataContractSerializerOperationBehavior dcsOperationBehavior = description.Behaviors.Find(); + if (dcsOperationBehavior != null) + { + description.Behaviors.Remove(dcsOperationBehavior); + + ProtoOperationBehavior newBehavior = new ProtoOperationBehavior(description); + newBehavior.MaxItemsInObjectGraph = dcsOperationBehavior.MaxItemsInObjectGraph; + description.Behaviors.Add(newBehavior); + } + } + + #endregion + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/ServiceModel/ProtoEndpointBehavior.cs.meta b/Runtime/Protobuf-net/ServiceModel/ProtoEndpointBehavior.cs.meta new file mode 100644 index 0000000..23ab783 --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/ProtoEndpointBehavior.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 6776c4cee4f69a94e9507afa458fdb50 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 
0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ServiceModel/ProtoOperationBehavior.cs b/Runtime/Protobuf-net/ServiceModel/ProtoOperationBehavior.cs new file mode 100644 index 0000000..9d5f02c --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/ProtoOperationBehavior.cs @@ -0,0 +1,52 @@ +#if FEAT_SERVICEMODEL && PLAT_XMLSERIALIZER +using System; +using System.Collections.Generic; +using System.Runtime.Serialization; +using System.ServiceModel.Description; +using System.Xml; +using ProtoBuf.Meta; + +namespace ProtoBuf.ServiceModel +{ + /// + /// Describes a WCF operation behaviour that can perform protobuf serialization + /// + public sealed class ProtoOperationBehavior : DataContractSerializerOperationBehavior + { + private TypeModel model; + + /// + /// Create a new ProtoOperationBehavior instance + /// + public ProtoOperationBehavior(OperationDescription operation) : base(operation) + { +#if !NO_RUNTIME + model = RuntimeTypeModel.Default; +#endif + } + + /// + /// The type-model that should be used with this behaviour + /// + public TypeModel Model + { + get { return model; } + set + { + model = value ?? throw new ArgumentNullException(nameof(value)); + } + } + + //public ProtoOperationBehavior(OperationDescription operation, DataContractFormatAttribute dataContractFormat) : base(operation, dataContractFormat) { } + + /// + /// Creates a protobuf serializer if possible (falling back to the default WCF serializer) + /// + public override XmlObjectSerializer CreateSerializer(Type type, XmlDictionaryString name, XmlDictionaryString ns, IList knownTypes) + { + if (model == null) throw new InvalidOperationException("No Model instance has been assigned to the ProtoOperationBehavior"); + return XmlProtoSerializer.TryCreate(model, type) ?? base.CreateSerializer(type, name, ns, knownTypes); + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/ServiceModel/ProtoOperationBehavior.cs.meta b/Runtime/Protobuf-net/ServiceModel/ProtoOperationBehavior.cs.meta new file mode 100644 index 0000000..3bd6fe4 --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/ProtoOperationBehavior.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: bc6637ab509d5ba41b14e428ed365764 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/ServiceModel/XmlProtoSerializer.cs b/Runtime/Protobuf-net/ServiceModel/XmlProtoSerializer.cs new file mode 100644 index 0000000..23959ea --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/XmlProtoSerializer.cs @@ -0,0 +1,208 @@ +#if FEAT_SERVICEMODEL && PLAT_XMLSERIALIZER +using System; +using System.IO; +using System.Runtime.Serialization; +using System.Xml; +using ProtoBuf.Meta; + +namespace ProtoBuf.ServiceModel +{ + /// + /// An xml object serializer that can embed protobuf data in a base-64 hunk (looking like a byte[]) + /// + public sealed class XmlProtoSerializer : XmlObjectSerializer + { + private readonly TypeModel model; + private readonly int key; + private readonly bool isList, isEnum; + private readonly Type type; + internal XmlProtoSerializer(TypeModel model, int key, Type type, bool isList) + { + if (key < 0) throw new ArgumentOutOfRangeException(nameof(key)); + this.model = model ?? throw new ArgumentNullException(nameof(model)); + this.key = key; + this.isList = isList; + this.type = type ?? 
throw new ArgumentOutOfRangeException(nameof(type)); + this.isEnum = Helpers.IsEnum(type); + } + /// + /// Attempt to create a new serializer for the given model and type + /// + /// A new serializer instance if the type is recognised by the model; null otherwise + public static XmlProtoSerializer TryCreate(TypeModel model, Type type) + { + if (model == null) throw new ArgumentNullException(nameof(model)); + if (type == null) throw new ArgumentNullException(nameof(type)); + + int key = GetKey(model, ref type, out bool isList); + if (key >= 0) + { + return new XmlProtoSerializer(model, key, type, isList); + } + return null; + } + + /// + /// Creates a new serializer for the given model and type + /// + public XmlProtoSerializer(TypeModel model, Type type) + { + if (model == null) throw new ArgumentNullException(nameof(model)); + if (type == null) throw new ArgumentNullException(nameof(type)); + + key = GetKey(model, ref type, out isList); + this.model = model; + this.type = type; + this.isEnum = Helpers.IsEnum(type); + if (key < 0) throw new ArgumentOutOfRangeException(nameof(type), "Type not recognised by the model: " + type.FullName); + } + + static int GetKey(TypeModel model, ref Type type, out bool isList) + { + if (model != null && type != null) + { + int key = model.GetKey(ref type); + if (key >= 0) + { + isList = false; + return key; + } + Type itemType = TypeModel.GetListItemType(model, type); + if (itemType != null) + { + key = model.GetKey(ref itemType); + if (key >= 0) + { + isList = true; + return key; + } + } + } + + isList = false; + return -1; + } + + /// + /// Ends an object in the output + /// + public override void WriteEndObject(XmlDictionaryWriter writer) + { + if (writer == null) throw new ArgumentNullException(nameof(writer)); + writer.WriteEndElement(); + } + + /// + /// Begins an object in the output + /// + public override void WriteStartObject(XmlDictionaryWriter writer, object graph) + { + if (writer == null) throw new ArgumentNullException(nameof(writer)); + writer.WriteStartElement(PROTO_ELEMENT); + } + + private const string PROTO_ELEMENT = "proto"; + + /// + /// Writes the body of an object in the output + /// + public override void WriteObjectContent(XmlDictionaryWriter writer, object graph) + { + if (writer == null) throw new ArgumentNullException(nameof(writer)); + if (graph == null) + { + writer.WriteAttributeString("nil", "true"); + } + else + { + using (MemoryStream ms = new MemoryStream()) + { + if (isList) + { + model.Serialize(ms, graph, null); + } + else + { + using (ProtoWriter protoWriter = ProtoWriter.Create(ms, model, null)) + { + model.Serialize(key, graph, protoWriter); + } + } + byte[] buffer = ms.GetBuffer(); + writer.WriteBase64(buffer, 0, (int)ms.Length); + } + } + } + + /// + /// Indicates whether this is the start of an object we are prepared to handle + /// + public override bool IsStartObject(XmlDictionaryReader reader) + { + if (reader == null) throw new ArgumentNullException(nameof(reader)); + reader.MoveToContent(); + return reader.NodeType == XmlNodeType.Element && reader.Name == PROTO_ELEMENT; + } + + /// + /// Reads the body of an object + /// + public override object ReadObject(XmlDictionaryReader reader, bool verifyObjectName) + { + if (reader == null) throw new ArgumentNullException(nameof(reader)); + reader.MoveToContent(); + bool isSelfClosed = reader.IsEmptyElement, isNil = reader.GetAttribute("nil") == "true"; + reader.ReadStartElement(PROTO_ELEMENT); + + // explicitly null + if (isNil) + { + if (!isSelfClosed) 
reader.ReadEndElement(); + return null; + } + if (isSelfClosed) // no real content + { + if (isList || isEnum) + { + return model.Deserialize(Stream.Null, null, type, null); + } + ProtoReader protoReader = null; + try + { + protoReader = ProtoReader.Create(Stream.Null, model, null, ProtoReader.TO_EOF); + return model.Deserialize(key, null, protoReader); + } + finally + { + ProtoReader.Recycle(protoReader); + } + } + + object result; + Helpers.DebugAssert(reader.CanReadBinaryContent, "CanReadBinaryContent"); + using (MemoryStream ms = new MemoryStream(reader.ReadContentAsBase64())) + { + if (isList || isEnum) + { + result = model.Deserialize(ms, null, type, null); + } + else + { + ProtoReader protoReader = null; + try + { + protoReader = ProtoReader.Create(ms, model, null, ProtoReader.TO_EOF); + result = model.Deserialize(key, null, protoReader); + } + finally + { + ProtoReader.Recycle(protoReader); + } + } + } + reader.ReadEndElement(); + return result; + } + } +} +#endif \ No newline at end of file diff --git a/Runtime/Protobuf-net/ServiceModel/XmlProtoSerializer.cs.meta b/Runtime/Protobuf-net/ServiceModel/XmlProtoSerializer.cs.meta new file mode 100644 index 0000000..d564f13 --- /dev/null +++ b/Runtime/Protobuf-net/ServiceModel/XmlProtoSerializer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: bca9bc75e9bb7c841b04b85204a0c9f6 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/SubItemToken.cs b/Runtime/Protobuf-net/SubItemToken.cs new file mode 100644 index 0000000..51f4a24 --- /dev/null +++ b/Runtime/Protobuf-net/SubItemToken.cs @@ -0,0 +1,16 @@ + +using System; + +namespace ProtoBuf +{ + /// + /// Used to hold particulars relating to nested objects. This is opaque to the caller - simply + /// give back the token you are given at the end of an object. 
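As a usage note for the token contract described above, the sketch below shows how a caller hands the token back once a nested object is finished. It assumes the public static StartSubItem/EndSubItem helpers of the protobuf-net v2 ProtoWriter API; the helper class and field number are hypothetical and are not part of this diff.

using ProtoBuf;

// Illustrative sketch of the "give the token back" contract described above.
static class SubItemSketch
{
    public static void WriteNested(object child, ProtoWriter writer)
    {
        // Field 1, length-prefixed ("string") wire type for the nested payload.
        ProtoWriter.WriteFieldHeader(1, WireType.String, writer);

        // StartSubItem returns an opaque SubItemToken ...
        SubItemToken token = ProtoWriter.StartSubItem(child, writer);

        // ... the child's fields would be written here ...

        // ... and the same token is handed back when the nested object ends.
        ProtoWriter.EndSubItem(token, writer);
    }
}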
+ /// + public readonly struct SubItemToken + { + internal readonly long value64; + internal SubItemToken(int value) => value64 = value; + internal SubItemToken(long value) => value64 = value; + } +} diff --git a/Runtime/Protobuf-net/SubItemToken.cs.meta b/Runtime/Protobuf-net/SubItemToken.cs.meta new file mode 100644 index 0000000..75435a1 --- /dev/null +++ b/Runtime/Protobuf-net/SubItemToken.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: bbb510795b4f3fa46aeecbd4521adfc0 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/Protobuf-net/WireType.cs b/Runtime/Protobuf-net/WireType.cs new file mode 100644 index 0000000..ab4fa20 --- /dev/null +++ b/Runtime/Protobuf-net/WireType.cs @@ -0,0 +1,50 @@ +namespace ProtoBuf +{ + /// + /// Indicates the encoding used to represent an individual value in a protobuf stream + /// + public enum WireType + { + /// + /// Represents an error condition + /// + None = -1, + + /// + /// Base-128 variant-length encoding + /// + Variant = 0, + + /// + /// Fixed-length 8-byte encoding + /// + Fixed64 = 1, + + /// + /// Length-variant-prefixed encoding + /// + String = 2, + + /// + /// Indicates the start of a group + /// + StartGroup = 3, + + /// + /// Indicates the end of a group + /// + EndGroup = 4, + + /// + /// Fixed-length 4-byte encoding + /// 10 + Fixed32 = 5, + + /// + /// This is not a formal wire-type in the "protocol buffers" spec, but + /// denotes a variant integer that should be interpreted using + /// zig-zag semantics (so -ve numbers aren't a significant overhead) + /// + SignedVariant = WireType.Variant | (1 << 3), + } +} diff --git a/Runtime/Protobuf-net/WireType.cs.meta b/Runtime/Protobuf-net/WireType.cs.meta new file mode 100644 index 0000000..2566026 --- /dev/null +++ b/Runtime/Protobuf-net/WireType.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 0a8403cbfeff31942997d1726a909e89 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp.meta b/Runtime/csharp-kcp.meta new file mode 100644 index 0000000..994a01e --- /dev/null +++ b/Runtime/csharp-kcp.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 70af0f7bda0af43e28b19c8b9bcb332c +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins.meta b/Runtime/csharp-kcp/Plugins.meta new file mode 100644 index 0000000..372aaf1 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 75050ad5678e1a14ca35aca009239c94 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll new file mode 100644 index 0000000..33ee9d8 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll.mdb b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll.mdb new file mode 100644 index 0000000..c71d1e9 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll.mdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll.mdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll.mdb.meta new file mode 100644 index 
0000000..432ce29 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll.mdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: e9f013cd847fa5144a1b8089fe0a7888 +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll.meta new file mode 100644 index 0000000..3e863e4 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.dll.meta @@ -0,0 +1,33 @@ +fileFormatVersion: 2 +guid: ec820f421ab70614f92d92ce8266e58b +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + Any: + second: + enabled: 1 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.pdb b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.pdb new file mode 100644 index 0000000..29075fb Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.pdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.pdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.pdb.meta new file mode 100644 index 0000000..43a7034 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.pdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: f115da9a4e605a04590cbbc5c46e416f +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.xml b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.xml new file mode 100644 index 0000000..f23c85d --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.xml @@ -0,0 +1,1978 @@ + + + + DotNetty.Buffers + + + + + + Abstract base class implementation of a + + + + + + Abstract base class for instances + + + + + + Abstract base class for implementations that wrap another + . + + + + + Returns a hex dump + of the specified buffer's sub-region. + + + + + Returns a hex dump + of the specified buffer's sub-region. + + + + + Returns a hex dump + of the specified buffer's sub-region. + + + + + Returns a hex dump + of the specified buffer's sub-region. + + + + + Calculates the hash code of the specified buffer. This method is + useful when implementing a new buffer type. + + + + + Returns the reader index of needle in haystack, or -1 if needle is not in haystack. + + + + + Returns {@code true} if and only if the two specified buffers are + identical to each other for {@code length} bytes starting at {@code aStartIndex} + index for the {@code a} buffer and {@code bStartIndex} index for the {@code b} buffer. + A more compact way to express this is: +

+ {@code a[aStartIndex : aStartIndex + length] == b[bStartIndex : bStartIndex + length]} +
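A call-site sketch of the region comparison described above, assuming the five-argument ByteBufferUtil.Equals overload of the bundled DotNetty.Buffers assembly (illustrative only, not part of this diff):

using System.Text;
using DotNetty.Buffers;

static class RegionEqualsSketch
{
    static bool TailsMatch()
    {
        IByteBuffer a = Unpooled.WrappedBuffer(Encoding.UTF8.GetBytes("hello-world"));
        IByteBuffer b = Unpooled.WrappedBuffer(Encoding.UTF8.GetBytes("cruel-world"));

        // Equivalent to the pseudo-expression above with length = 6 ("-world").
        return ByteBufferUtil.Equals(a, a.WriterIndex - 6, b, b.WriterIndex - 6, 6);
    }
}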

+
+ + + Returns {@code true} if and only if the two specified buffers are + identical to each other as described in {@link ByteBuf#equals(Object)}. + This method is useful when implementing a new buffer type. + + + + + Compares the two specified buffers as described in {@link ByteBuf#compareTo(ByteBuf)}. + This method is useful when implementing a new buffer type. + + + + + The default implementation of . + This method is useful when implementing a new buffer type. + + + + + Read the given amount of bytes into a new {@link ByteBuf} that is allocated from the {@link ByteBufAllocator}. + + + + + Encode a string in http://en.wikipedia.org/wiki/UTF-8 and write it into reserveBytes of + a byte buffer. The reserveBytes must be computed (ie eagerly using {@link #utf8MaxBytes(string)} + or exactly with #utf8Bytes(string)}) to ensure this method not to not: for performance reasons + the index checks will be performed using just reserveBytes. + + This method returns the actual number of bytes written. + + + + Encode the given using the given into a new + which + is allocated via the . + + The to allocate {@link IByteBuffer}. + src The to encode. + charset The specified + + + + Encode the given using the given into a new + which + is allocated via the . + + The to allocate {@link IByteBuffer}. + src The to encode. + charset The specified + the extra capacity to alloc except the space for decoding. + + + + Returns a multi-line hexadecimal dump of the specified {@link ByteBuf} that is easy to read by humans. + + + + + Returns a multi-line hexadecimal dump of the specified {@link ByteBuf} that is easy to read by humans, + starting at the given {@code offset} using the given {@code length}. + + + + + Appends the prettified multi-line hexadecimal dump of the specified {@link ByteBuf} to the specified + {@link StringBuilder} that is easy to read by humans. + + + + + Appends the prettified multi-line hexadecimal dump of the specified {@link ByteBuf} to the specified + {@link StringBuilder} that is easy to read by humans, starting at the given {@code offset} using + the given {@code length}. + + + + + Toggles the endianness of the specified 64-bit long integer. + + + + + Toggles the endianness of the specified 32-bit integer. + + + + + Toggles the endianness of the specified 16-bit integer. + + + + + Default on most Windows systems + + + + + Add the given {@link IByteBuffer}. + Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuffer}. + If you need to have it increased you need to handle it by your own. + @param buffer the {@link IByteBuffer} to add + + + + + Add the given {@link IByteBuffer}s. + Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuffer}. + If you need to have it increased you need to handle it by your own. + @param buffers the {@link IByteBuffer}s to add + + + + + Add the given {@link IByteBuffer}s. + Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuffer}. + If you need to have it increased you need to handle it by your own. + @param buffers the {@link IByteBuffer}s to add + + + + + Add the given {@link IByteBuffer} on the specific index. + Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuffer}. + If you need to have it increased you need to handle it by your own. 
+ @param cIndex the index on which the {@link IByteBuffer} will be added + @param buffer the {@link IByteBuffer} to add + + + + + Add the given {@link IByteBuffer}s on the specific index + Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuffer}. + If you need to have it increased you need to handle it by your own. + @param cIndex the index on which the {@link IByteBuffer} will be added. + @param buffers the {@link IByteBuffer}s to add + + + + + Add the given {@link ByteBuf}s on the specific index + Be aware that this method does not increase the {@code writerIndex} of the {@link CompositeByteBuffer}. + If you need to have it increased you need to handle it by your own. + @param cIndex the index on which the {@link IByteBuffer} will be added. + @param buffers the {@link IByteBuffer}s to add + + + + + This should only be called as last operation from a method as this may adjust the underlying + array of components and so affect the index etc. + + + + + Remove the {@link IByteBuffer} from the given index. + @param cIndex the index on from which the {@link IByteBuffer} will be remove + + + + + Remove the number of {@link IByteBuffer}s starting from the given index. + @param cIndex the index on which the {@link IByteBuffer}s will be started to removed + @param numComponents the number of components to remove + + + + + Same with {@link #slice(int, int)} except that this method returns a list. + + + + + Return the current number of {@link IByteBuffer}'s that are composed in this instance + + + + + Return the max number of {@link IByteBuffer}'s that are composed in this instance + + + + + Return the index for the given offset + + + + + Return the {@link IByteBuffer} on the specified index + @param cIndex the index for which the {@link IByteBuffer} should be returned + @return buffer the {@link IByteBuffer} on the specified index + + + + + Return the {@link IByteBuffer} on the specified index + @param offset the offset for which the {@link IByteBuffer} should be returned + @return the {@link IByteBuffer} on the specified index + + + + + Return the internal {@link IByteBuffer} on the specified index. Note that updating the indexes of the returned + buffer will lead to an undefined behavior of this buffer. + @param cIndex the index for which the {@link IByteBuffer} should be returned + + + + + Return the internal {@link IByteBuffer} on the specified offset. Note that updating the indexes of the returned + buffer will lead to an undefined behavior of this buffer. + @param offset the offset for which the {@link IByteBuffer} should be returned + + + + + Consolidate the composed {@link IByteBuffer}s + + + + + Consolidate the composed {@link IByteBuffer}s + @param cIndex the index on which to start to compose + @param numComponents the number of components to compose + + + + + Discard all {@link IByteBuffer}s which are read. + + + + + + Represents an empty byte buffer + + + + + Inspired by the Netty ByteBuffer implementation + (https://github.com/netty/netty/blob/master/buffer/src/main/java/io/netty/buffer/ByteBuf.java) + Provides circular-buffer-esque security around a byte array, allowing reads and writes to occur independently. + In general, the guarantees: + /// LESS THAN OR EQUAL TO LESS THAN OR EQUAL TO + . + + + + + Expands the capacity of this buffer so long as it is less than . 
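The independent reader/writer cursors described above are easiest to see in a tiny round-trip. The sketch assumes the public IByteBuffer members of the DotNetty.Buffers assembly shipped under Plugins; it is an illustration, not code from this diff.

using DotNetty.Buffers;

static class IndexSketch
{
    static void RoundTrip()
    {
        IByteBuffer buf = Unpooled.Buffer(8); // ReaderIndex = WriterIndex = 0
        buf.WriteInt(42);                     // WriterIndex -> 4
        buf.WriteShort(7);                    // WriterIndex -> 6

        int i = buf.ReadInt();                // ReaderIndex -> 4
        short s = buf.ReadShort();            // ReaderIndex -> 6

        buf.DiscardReadBytes();               // moves unread bytes to index 0 and rebases both cursors
        buf.Release();                        // buffers are reference counted
    }
}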
+ + + + + The allocator who created this buffer + + + + + Sets the of this buffer + + thrown if exceeds the length of the buffer + + + + Sets the of this buffer + + + thrown if is greater than + or less than 0. + + + + + Sets both indexes + + + thrown if or exceeds + the length of the buffer + + + + + Returns true if - is greater than 0. + + + + + Is the buffer readable if and only if the buffer contains equal or more than the specified number of elements + + The number of elements we would like to read + + + + Returns true if and only if - is greater than zero. + + + + + Returns true if and only if the buffer has enough to accomodate + additional bytes. + + The number of additional elements we would like to write. + + + + Sets the and to 0. Does not erase any of the data + written into the buffer already, + but it will overwrite that data. + + + + + Marks the current in this buffer. You can reposition the current + + to the marked by calling . + The initial value of the marked is 0. + + + + + Repositions the current to the marked in this buffer. + + + is thrown if the current is less than the + marked + + + + + Marks the current in this buffer. You can reposition the current + + to the marked by calling . + The initial value of the marked is 0. + + + + + Repositions the current to the marked in this buffer. + + + is thrown if the current is greater than the + marked + + + + + Discards the bytes between the 0th index and . + It moves the bytes between and to the 0th index, + and sets and to 0 and + oldWriterIndex - oldReaderIndex respectively. + + + + + Similar to except that this method might discard + some, all, or none of read bytes depending on its internal implementation to reduce + overall memory bandwidth consumption at the cost of potentially additional memory + consumption. + + + + + Makes sure the number of is equal to or greater than + the specified value (.) If there is enough writable bytes in this buffer, + the method returns with no side effect. Otherwise, it raises an . + + The expected number of minimum writable bytes + + if + > + . + + + + + Tries to make sure the number of + is equal to or greater than the specified value. Unlike , + this method does not raise an exception but returns a code. + + the expected minimum number of writable bytes + + When + minWritableBytes > : +
+ true - the capacity of the buffer is expanded to
+ false - the capacity of the buffer is unchanged
+ + + 0 if the buffer has enough writable bytes, and its capacity is unchanged. + 1 if the buffer does not have enough bytes, and its capacity is unchanged. + 2 if the buffer has enough writable bytes, and its capacity has been increased. + 3 if the buffer does not have enough bytes, but its capacity has been increased to its maximum. + +
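As a usage note for the expansion rules listed above, a caller typically checks writability, or asks the buffer to grow, before a bulk write. A minimal sketch assuming the IsWritable/EnsureWritable/WriteBytes members of the bundled IByteBuffer API (illustrative only):

using DotNetty.Buffers;

static class EnsureWritableSketch
{
    static void AppendPayload(IByteBuffer buf, byte[] payload)
    {
        if (!buf.IsWritable(payload.Length))
        {
            // Grows the capacity toward MaxCapacity, or throws if the buffer cannot grow enough.
            buf.EnsureWritable(payload.Length);
        }
        buf.WriteBytes(payload);
    }
}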
+ + + Gets a boolean at the specified absolute in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 1 greater than + + + + + Gets a byte at the specified absolute in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 1 greater than + + + + + Gets a short at the specified absolute in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Gets a short at the specified absolute in this buffer + in Little Endian Byte Order. This method does not modify + or of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Gets an ushort at the specified absolute in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Gets an ushort at the specified absolute in this buffer + in Little Endian Byte Order. This method does not modify + or of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Gets an integer at the specified absolute in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Gets an integer at the specified absolute in this buffer + in Little Endian Byte Order. This method does not modify + or of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Gets an unsigned integer at the specified absolute in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Gets an unsigned integer at the specified absolute in this buffer + in Little Endian Byte Order. This method does not modify + or of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Gets a long integer at the specified absolute in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 8 greater than + + + + + Gets a long integer at the specified absolute in this buffer + in Little Endian Byte Order. This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 8 greater than + + + + + Gets a 24-bit medium integer at the specified absolute index in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 3 greater than + + + + + Gets a 24-bit medium integer at the specified absolute index in this buffer + in Little Endian Byte Order. This method does not modify + or of this buffer. + + + if the specified is less than 0 or + index + 3 greater than + + + + + Gets an unsigned 24-bit medium integer at the specified absolute index in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 3 greater than + + + + + Gets an unsigned 24-bit medium integer at the specified absolute index in this buffer + in Little Endian Byte Order. This method does not modify + or of this buffer. + + + if the specified is less than 0 or + index + 3 greater than + + + + + Gets a char at the specified absolute in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Gets a float at the specified absolute in this buffer. 
+ This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Gets a float at the specified absolute in this buffer + in Little Endian Byte Order. This method does not modify + or of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Gets a double at the specified absolute in this buffer. + This method does not modify or + of this buffer. + + + if the specified is less than 0 or + index + 8 greater than + + + + + Gets a double at the specified absolute in this buffer + in Little Endian Byte Order. This method does not modify + or of this buffer. + + + if the specified is less than 0 or + index + 8 greater than + + + + + Transfers this buffers data to the specified buffer starting at the specified + absolute until the destination becomes non-writable. + + + if the specified is less than 0 or + index + 1 greater than + + + + + Transfers this buffers data to the specified buffer starting at the specified + absolute until the destination becomes non-writable. + + + if the specified is less than 0 or + index + 1 greater than + + + + + Transfers this buffers data to the specified buffer starting at the specified + absolute until the destination becomes non-writable. + + + if the specified is less than 0 or + index + 1 greater than + + + + + Transfers this buffers data to the specified buffer starting at the specified + absolute until the destination becomes non-writable. + + + if the specified is less than 0 or + index + 1 greater than + + + + + Transfers this buffers data to the specified buffer starting at the specified + absolute until the destination becomes non-writable. + + + if the specified is less than 0 or + index + 1 greater than + + + + + Transfers this buffer's data to the specified stream starting at the + specified absolute index. + + + This method does not modify readerIndex or writerIndex of + this buffer. + + absolute index in this buffer to start getting bytes from + destination stream + the number of bytes to transfer + + if the specified index is less than 0 or + if index + length is greater than + this.capacity + + + + + Gets a string with the given length at the given index. + + + length the length to read + charset that should be use + the string value. + + if length is greater than readable bytes. + + + + + Sets the specified boolean at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 1 greater than + + + + + Sets the specified byte at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 1 greater than + + + + + Sets the specified short at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Sets the specified short at the specified absolute in this buffer + in the Little Endian Byte Order. This method does not directly modify + or of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Sets the specified unsigned short at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Sets the specified unsigned short at the specified absolute in this buffer + in the Little Endian Byte Order. 
This method does not directly modify + or of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Sets the specified integer at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Sets the specified integer at the specified absolute in this buffer + in the Little Endian Byte Order. This method does not directly modify + or of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Sets the specified unsigned integer at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Sets the specified unsigned integer at the specified absolute in this buffer + in the Little Endian Byte Order. This method does not directly modify or + of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Sets the specified 24-bit medium integer at the specified absolute in this buffer. + Note that the most significant byte is ignored in the specified value. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 3 greater than + + + + + Sets the specified 24-bit medium integer at the specified absolute in this buffer. + Note that the most significant byte is ignored in the specified value. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 3 greater than + + + + + Sets the specified long integer at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 8 greater than + + + + + Sets the specified long integer at the specified absolute in this buffer + in the Little Endian Byte Order. This method does not directly modify or + of this buffer. + + + if the specified is less than 0 or + index + 8 greater than + + + + + Sets the specified UTF-16 char at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 2 greater than + + + + + Sets the specified double at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 8 greater than + + + + + Sets the specified float at the specified absolute in this buffer. + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Sets the specified float at the specified absolute in this buffer + in Little Endian Byte Order. This method does not directly modify + or of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Sets the specified float at the specified absolute in this buffer + in Little Endian Byte Order. This method does not directly modify + or of this buffer. + + + if the specified is less than 0 or + index + 4 greater than + + + + + Transfers the byte buffer's contents starting at the specified absolute . + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + + .ReadableBytes greater than + + + + + Transfers the byte buffer's contents starting at the specified absolute . + This method does not directly modify or of this buffer. 
+ + + if the specified is less than 0 or + is less than 0 or + + greater than + + + + + Transfers the byte buffer's contents starting at the specified absolute . + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + is less than 0 or + is less than 0 or + + greater than or + + greater than .Capacity + + + + + Transfers the byte buffer's contents starting at the specified absolute . + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + + .Length greater than + + + + + Transfers the byte buffer's contents starting at the specified absolute . + This method does not directly modify or of this buffer. + + + if the specified is less than 0 or + is less than 0 or + is less than 0 or + + greater than or + + greater than .Length + + + + + Transfers the content of the specified source stream to this buffer + starting at the specified absolute . + This method does not modify or of + this buffer. + + absolute index in this byte buffer to start writing to + + number of bytes to transfer + cancellation token + the actual number of bytes read in from the specified channel. + + if the specified index is less than 0 or + if index + length is greater than this.capacity + + + + + Fills this buffer with NULL (0x00) starting at the specified + absolute index. This method does not modify reader index + or writer index of this buffer + + absolute index in this byte buffer to start writing to + length the number of NULs to write to the buffer + + if the specified index is less than 0 or if index + length + is greater than capacity. + + + + + Writes the specified string at the current writer index and increases + the writer index by the written bytes. + + Index on which the string should be written + The string value. + Encoding that should be used. + The written number of bytes. + + if writable bytes is not large enough to write the whole string. + + + + + Gets a boolean at the current and increases the + by 1 in this buffer. + + if is less than 1 + + + + Gets a byte at the current and increases the + by 1 in this buffer. + + if is less than 1 + + + + Gets a short at the current and increases the + by 2 in this buffer. + + if is less than 2 + + + + Gets a short at the current in the Little Endian Byte Order and increases + the by 2 in this buffer. + + if is less than 2 + + + + Gets a 24-bit medium integer at the current and increases the + by 3 in this buffer. + + if is less than 3 + + + + Gets a 24-bit medium integer at the current in the Little Endian Byte Order and + increases the by 3 in this buffer. + + if is less than 3 + + + + Gets an unsigned 24-bit medium integer at the current and increases the + by 3 in this buffer. + + if is less than 3 + + + + Gets an unsigned 24-bit medium integer at the current in the Little Endian Byte Order + and increases the by 3 in this buffer. + + if is less than 3 + + + + Gets an unsigned short at the current and increases the + by 2 in this buffer. + + if is less than 2 + + + + Gets an unsigned short at the current in the Little Endian Byte Order and + increases the by 2 in this buffer. + + if is less than 2 + + + + Gets an integer at the current and increases the + by 4 in this buffer. + + if is less than 4 + + + + Gets an integer at the current in the Little Endian Byte Order and increases + the by 4 in this buffer. + + if is less than 4 + + + + Gets an unsigned integer at the current and increases the + by 4 in this buffer. 
+ + if is less than 4 + + + + Gets an unsigned integer at the current in the Little Endian Byte Order and + increases the by 4 in this buffer. + + if is less than 4 + + + + Gets an long at the current and increases the + by 8 in this buffer. + + if is less than 4 + + + + Gets an long at the current in the Little Endian Byte Order and + increases the by 8 in this buffer. + + if is less than 4 + + + + Gets a 2-byte UTF-16 character at the current and increases the + + by 2 in this buffer. + + if is less than 2 + + + + Gets an 8-byte Decimaling integer at the current and increases the + + by 8 in this buffer. + + if is less than 8 + + + + Gets an 8-byte Decimaling integer at the current and increases the + by 8 in this buffer in Little Endian Byte Order. + + if is less than 8 + + + + Gets an 4-byte Decimaling integer at the current and increases the + + by 4 in this buffer. + + if is less than 4 + + + + Gets an 4-byte Decimaling integer at the current and increases the + by 4 in this buffer in Little Endian Byte Order. + + if is less than 4 + + + + Reads bytes from this buffer into a new destination buffer. + + + if is less than + + + + + Transfers bytes from this buffer's data into the specified destination buffer + starting at the current until the destination becomes + non-writable and increases the by the number of transferred bytes. + + + if destination. is greater than + . + + + + + Gets a string with the given length at the current reader index + and increases the reader index by the given length. + + The length to read + Encoding that should be used + The string value + + + + Increases the current by the specified in this buffer. + + if is greater than . + + + + Returns the maximum of that this buffer holds. Note that + + or might return a less number of s of + . + + + -1 if this buffer cannot represent its content as of . + the number of the underlying s if this buffer has at least one underlying segment. + Note that this method does not return 0 to avoid confusion. + + + + + + + + + Exposes this buffer's readable bytes as an of . Returned segment + shares the content with this buffer. This method is identical + to buf.GetIoBuffer(buf.ReaderIndex, buf.ReadableBytes). This method does not + modify or of this buffer. Please note that the + returned segment will not see the changes of this buffer if this buffer is a dynamic + buffer and it adjusted its capacity. + + + if this buffer cannot represent its content as + of + + + + + + + + Exposes this buffer's sub-region as an of . Returned segment + shares the content with this buffer. This method does not + modify or of this buffer. Please note that the + returned segment will not see the changes of this buffer if this buffer is a dynamic + buffer and it adjusted its capacity. + + + if this buffer cannot represent its content as + of + + + + + + + + Exposes this buffer's readable bytes as an array of of . Returned + segments + share the content with this buffer. This method does not + modify or of this buffer. Please note that + returned segments will not see the changes of this buffer if this buffer is a dynamic + buffer and it adjusted its capacity. + + + if this buffer cannot represent its content with + of + + + + + + + + Exposes this buffer's bytes as an array of of for the specified + index and length. + Returned segments share the content with this buffer. This method does + not modify or of this buffer. 
Please note that + returned segments will not see the changes of this buffer if this buffer is a dynamic + buffer and it adjusted its capacity. + + + if this buffer cannot represent its content with + of + + + + + + + + Flag that indicates if this is backed by a byte array or not + + + + + Grabs the underlying byte array for this buffer + + + + + Returns {@code true} if and only if this buffer has a reference to the low-level memory address that points + to the backing data. + + + + + Returns the low-level memory address that point to the first byte of ths backing data. + + The low-level memory address + + + + Returns the pointer address of the buffer if the memory is pinned. + + IntPtr.Zero if not pinned. + + + + Creates a deep clone of the existing byte array and returns it + + + + + Unwraps a nested buffer + + + + + Returns a copy of this buffer's readable bytes. Modifying the content of the + returned buffer or this buffer does not affect each other at all.This method is + identical to {@code buf.copy(buf.readerIndex(), buf.readableBytes())}. + This method does not modify {@code readerIndex} or {@code writerIndex} of this buffer. + + + + + Iterates over the readable bytes of this buffer with the specified processor in ascending order. + + + -1 if the processor iterated to or beyond the end of the readable bytes. + The last-visited index If the returned false. + + Processor. + + + + Iterates over the specified area of this buffer with the specified in ascending order. + (i.e. , (index + 1), .. (index + length - 1)) + + + -1 if the processor iterated to or beyond the end of the specified area. + The last-visited index If the returned false. + + Index. + Length. + Processor. + + + + Iterates over the readable bytes of this buffer with the specified in descending order. + + + -1 if the processor iterated to or beyond the beginning of the readable bytes. + The last-visited index If the returned false. + + Processor. + + + + Iterates over the specified area of this buffer with the specified in descending order. + (i.e. (index + length - 1), (index + length - 2), ... ) + + + -1 if the processor iterated to or beyond the beginning of the specified area. + The last-visited index If the returned false. + + Index. + Length. + Processor. + + + + Thread-safe interface for allocating /. + + + + + Returns the number of bytes of heap memory used by a {@link ByteBufAllocator} or {@code -1} if unknown. + + + + + Returns the number of bytes of direct memory used by a {@link ByteBufAllocator} or {@code -1} if unknown. + + + + + Returns a for a + + + + + Return the data which is held by this {@link ByteBufHolder}. + + + + + Create a deep copy of this {@link ByteBufHolder}. + + + + + Duplicate the {@link ByteBufHolder}. Be aware that this will not automatically call {@link #retain()}. + + + + + Duplicates this {@link ByteBufHolder}. This method returns a retained duplicate unlike {@link #duplicate()}. + + + + + Returns a new {@link ByteBufHolder} which contains the specified {@code content}. + + + + Returns the number of thread caches backed by this arena. + + + Returns the number of tiny sub-pages for the arena. + + + Returns the number of small sub-pages for the arena. + + + Returns the number of chunk lists for the arena. + + + Returns an unmodifiable {@link List} which holds {@link PoolSubpageMetric}s for tiny sub-pages. + + + Returns an unmodifiable {@link List} which holds {@link PoolSubpageMetric}s for small sub-pages. 
+ + + Returns an unmodifiable {@link List} which holds {@link PoolChunkListMetric}s. + + + Return the number of allocations done via the arena. This includes all sizes. + + + Return the number of tiny allocations done via the arena. + + + Return the number of small allocations done via the arena. + + + Return the number of normal allocations done via the arena. + + + Return the number of huge allocations done via the arena. + + + Return the number of deallocations done via the arena. This includes all sizes. + + + Return the number of tiny deallocations done via the arena. + + + Return the number of small deallocations done via the arena. + + + Return the number of normal deallocations done via the arena. + + + Return the number of huge deallocations done via the arena. + + + Return the number of currently active allocations. + + + Return the number of currently active tiny allocations. + + + Return the number of currently active small allocations. + + + Return the number of currently active normal allocations. + + + Return the number of currently active huge allocations. + + + Return the number of active bytes that are currently allocated by the arena. + + + Return the minimum usage of the chunk list before which chunks are promoted to the previous list. + + + Return the maximum usage of the chunk list after which chunks are promoted to the next list. + + + Return the percentage of the current usage of the chunk. + + + Return the size of the chunk in bytes, this is the maximum of bytes that can be served out of the chunk. + + + Return the number of free bytes in the chunk. + + + Return the number of maximal elements that can be allocated out of the sub-page. + + + Return the number of available elements to be allocated. + + + Return the size (in bytes) of the elements that will be allocated. + + + Return the size (in bytes) of this page. + + + + Description of algorithm for PageRun/PoolSubpage allocation from PoolChunk + Notation: The following terms are important to understand the code + > page - a page is the smallest unit of memory chunk that can be allocated + > chunk - a chunk is a collection of pages + > in this code chunkSize = 2^{maxOrder} /// pageSize + To begin we allocate a byte array of size = chunkSize + Whenever a ByteBuf of given size needs to be created we search for the first position + in the byte array that has enough empty space to accommodate the requested size and + return a (long) handle that encodes this offset information, (this memory segment is then + marked as reserved so it is always used by exactly one ByteBuf and no more) + For simplicity all sizes are normalized according to PoolArena#normalizeCapacity method + This ensures that when we request for memory segments of size >= pageSize the normalizedCapacity + equals the next nearest power of 2 + To search for the first offset in chunk that has at least requested size available we construct a + complete balanced binary tree and store it in an array (just like heaps) - memoryMap + The tree looks like this (the size of each node being mentioned in the parenthesis) + depth=0 1 node (chunkSize) + depth=1 2 nodes (chunkSize/2) + .. + .. + depth=d 2^d nodes (chunkSize/2^d) + .. 
+ depth=maxOrder 2^maxOrder nodes (chunkSize/2^{maxOrder} = pageSize) + depth=maxOrder is the last level and the leafs consist of pages + With this tree available searching in chunkArray translates like this: + To allocate a memory segment of size chunkSize/2^k we search for the first node (from left) at height k + which is unused + Algorithm: + ---------- + Encode the tree in memoryMap with the notation + memoryMap[id] = x => in the subtree rooted at id, the first node that is free to be allocated + is at depth x (counted from depth=0) i.e., at depths [depth_of_id, x), there is no node that is free + As we allocate and free nodes, we update values stored in memoryMap so that the property is maintained + Initialization - + In the beginning we construct the memoryMap array by storing the depth of a node at each node + i.e., memoryMap[id] = depth_of_id + Observations: + ------------- + 1) memoryMap[id] = depth_of_id => it is free / unallocated + 2) memoryMap[id] > depth_of_id => at least one of its child nodes is allocated, so we cannot allocate it, but + some of its children can still be allocated based on their availability + 3) memoryMap[id] = maxOrder + 1 => the node is fully allocated and thus none of its children can be allocated, it + is thus marked as unusable + Algorithm: [allocateNode(d) => we want to find the first node (from left) at height h that can be allocated] + ---------- + 1) start at root (i.e., depth = 0 or id = 1) + 2) if memoryMap[1] > d => cannot be allocated from this chunk + 3) if left node value <= h; we can allocate from left subtree so move to left and repeat until found + 4) else try in right subtree + Algorithm: [allocateRun(size)] + ---------- + 1) Compute d = log_2(chunkSize/size) + 2) Return allocateNode(d) + Algorithm: [allocateSubpage(size)] + ---------- + 1) use allocateNode(maxOrder) to find an empty (i.e., unused) leaf (i.e., page) + 2) use this handle to construct the PoolSubpage object or if it already exists just call init(normCapacity) + note that this PoolSubpage object is added to subpagesPool in the PoolArena when we init() it + Note: + ----- + In the implementation for improving cache coherence, + we store 2 pieces of information (i.e, 2 byte vals) as a short value in memoryMap + memoryMap[id]= (depth_of_id, x) + where as per convention defined above + the second value (i.e, x) indicates that the first node which is free to be allocated is at depth x (from root) + + + + Used to determine if the requested capacity is equal to or greater than pageSize. + + + Used to mark memory as unusable + + + Creates a special chunk that is not pooled. 
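The memoryMap bookkeeping described above is easier to follow with a tiny standalone model. The sketch below is plain C#, independent of the actual PoolChunk code; it only mirrors the documented rules (memoryMap[id] = depth_of_id means free, allocation marks a node unusable and re-derives each ancestor as the minimum of its children).

using System;

static class BuddyTreeSketch
{
    const int MaxOrder = 3;                 // 2^3 leaf "pages" per chunk in this toy model
    const int Unusable = MaxOrder + 1;
    static readonly byte[] memoryMap = Init();

    static byte[] Init()
    {
        var map = new byte[2 << MaxOrder];  // heap-style array, ids 1..(2^(MaxOrder+1) - 1)
        for (int d = 0, id = 1; d <= MaxOrder; d++)
            for (int n = 0; n < (1 << d); n++)
                map[id++] = (byte)d;        // initially memoryMap[id] = depth_of_id, i.e. free
        return map;
    }

    // Find the left-most free node at depth d, mark it allocated, then fix the ancestors.
    static int AllocateNode(int d)
    {
        if (memoryMap[1] > d) return -1;    // the chunk has no run of that size left
        int id = 1;
        for (int depth = 0; depth < d; depth++)
        {
            id <<= 1;                       // try the left child first
            if (memoryMap[id] > d) id ^= 1; // left subtree cannot serve depth d, take the right child
        }
        memoryMap[id] = Unusable;           // reserve the node (and implicitly its whole subtree)
        for (int p = id >> 1; p >= 1; p >>= 1)
            memoryMap[p] = (byte)Math.Min(memoryMap[2 * p], memoryMap[2 * p + 1]);
        return id;
    }
}

Calling AllocateNode(MaxOrder) repeatedly hands out the leaf pages left to right, while AllocateNode(MaxOrder - 1) reserves a two-page run, which is the run/subpage split the description above is getting at.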
+ + + Update method used by allocate + This is triggered only when a successor is allocated and all its predecessors + need to update their state + The minimal depth at which subtree rooted at id has some free space + + @param id id + + + Update method used by free + This needs to handle the special case when both children are completely free + in which case parent be directly allocated on request of size = child-size * 2 + + @param id id + + + Algorithm to allocate an index in memoryMap when we query for a free node + at depth d + + @param d depth + @return index in memoryMap + + + Allocate a run of pages (>=1) + + @param normCapacity normalized capacity + @return index in memoryMap + + + Create/ initialize a new PoolSubpage of normCapacity + Any PoolSubpage created/ initialized here is added to subpage pool in the PoolArena that owns this PoolChunk + + @param normCapacity normalized capacity + @return index in memoryMap + + + Free a subpage or a run of pages + When a subpage is freed from PoolSubpage, it might be added back to subpage pool of the owning PoolArena + If the subpage pool in PoolArena has at least one other PoolSubpage of given elemSize, we can + completely free the owning Page so it is available for subsequent allocations + + @param handle handle to free + + + represents the size in #bytes supported by node 'id' in the tree + + + Calculates the maximum capacity of a buffer that will ever be possible to allocate out of the {@link PoolChunk}s + that belong to the {@link PoolChunkList} with the given {@code minUsage} and {@code maxUsage} settings. + + + Moves the {@link PoolChunk} down the {@link PoolChunkList} linked-list so it will end up in the right + {@link PoolChunkList} that has the correct minUsage / maxUsage in respect to {@link PoolChunk#usage()}. + + + Adds the {@link PoolChunk} to this {@link PoolChunkList}. + + + Method must be called before reuse this {@link PooledByteBufAllocator} + + + Returns the status of the allocator (which contains all metrics) as string. Be aware this may be expensive + and so should not called too frequently. + + + Special constructor that creates a linked list head + + + Returns the bitmap index of the subpage allocation. + + + @return {@code true} if this subpage is in use. + {@code false} if this subpage is not used by its chunk and thus it's OK to be released. + + + + Acts a Thread cache for allocations. This implementation is moduled after + jemalloc and the descripted + technics of + + Scalable + memory allocation using jemalloc + + . + + + + Try to allocate a tiny buffer out of the cache. Returns {@code true} if successful {@code false} otherwise + + + Try to allocate a small buffer out of the cache. Returns {@code true} if successful {@code false} otherwise + + + Try to allocate a small buffer out of the cache. Returns {@code true} if successful {@code false} otherwise + + + Add {@link PoolChunk} and {@code handle} to the cache if there is enough room. + Returns {@code true} if it fit into the cache {@code false} otherwise. + + + Should be called if the Thread that uses this cache is about to exist to release resources out of the cache + + + Cache used for buffers which are backed by TINY or SMALL size. + + + Cache used for buffers which are backed by NORMAL size. + + + Init the {@link PooledByteBuffer} using the provided chunk and handle with the capacity restrictions. + + + Add to cache if not already full. + + + Allocate something out of the cache if possible and remove the entry from the cache. 
+ + + Clear out this cache and free up all previous cached {@link PoolChunk}s and {@code handle}s. + + + Free up cached {@link PoolChunk}s if not allocated frequently enough. + + + + Utility class for managing and creating unpooled buffers + + + + + Creates a new big-endian buffer which wraps the specified array. + A modification on the specified array's content will be visible to the returned buffer. + + + + + Creates a new big-endian buffer which wraps the sub-region of the + specified array. A modification on the specified array's content + will be visible to the returned buffer. + + + + + Creates a new buffer which wraps the specified buffer's readable bytes. + A modification on the specified buffer's content will be visible to the returned buffer. + + The buffer to wrap. Reference count ownership of this variable is transfered to this method. + The readable portion of the buffer, or an empty buffer if there is no readable portion. + + + + Creates a new big-endian composite buffer which wraps the specified arrays without copying them. + A modification on the specified arrays' content will be visible to the returned buffer. + + + + + Creates a new big-endian composite buffer which wraps the readable bytes of the specified buffers without copying them. + A modification on the content of the specified buffers will be visible to the returned buffer. + + The buffers to wrap. Reference count ownership of all variables is transfered to this method. + The readable portion of the buffers. The caller is responsible for releasing this buffer. + + + + Creates a new big-endian composite buffer which wraps the specified arrays without copying them. + A modification on the specified arrays' content will be visible to the returned buffer. + + + + + Creates a new big-endian composite buffer which wraps the readable bytes of the specified buffers without copying them. + A modification on the content of the specified buffers will be visible to the returned buffer. + + Advisement as to how many independent buffers are allowed to exist before consolidation occurs. + The buffers to wrap. Reference count ownership of all variables is transfered to this method. + The readable portion of the buffers. The caller is responsible for releasing this buffer. + + + + Creates a new big-endian buffer whose content is a copy of the specified array + The new buffer's and + are 0 and respectively. + + A buffer we're going to copy. + The new buffer that copies the contents of array. + + + + Creates a new big-endian buffer whose content is a copy of the specified array. + The new buffer's and + are 0 and respectively. + + A buffer we're going to copy. + The index offset from which we're going to read array. + + The number of bytes we're going to read from array beginning from position offset. + + The new buffer that copies the contents of array. + + + + Creates a new big-endian buffer whose content is a copy of the specified . + The new buffer's and + are 0 and respectively. + + A buffer we're going to copy. + The new buffer that copies the contents of buffer. + + + + Creates a new big-endian buffer whose content is a merged copy of of the specified arrays. + The new buffer's and + are 0 and respectively. + + + + + + + Creates a new big-endian buffer whose content is a merged copy of the specified . + The new buffer's and + are 0 and respectively. + + Buffers we're going to copy. + The new buffer that copies the contents of buffers. + + + + Creates a new 4-byte big-endian buffer that holds the specified 32-bit integer. 
+ + + + + Create a big-endian buffer that holds a sequence of the specified 32-bit integers. + + + + + Creates a new 2-byte big-endian buffer that holds the specified 16-bit integer. + + + + + Create a new big-endian buffer that holds a sequence of the specified 16-bit integers. + + + + + Create a new big-endian buffer that holds a sequence of the specified 16-bit integers. + + + + + Creates a new 3-byte big-endian buffer that holds the specified 24-bit integer. + + + + + Create a new big-endian buffer that holds a sequence of the specified 24-bit integers. + + + + + Creates a new 8-byte big-endian buffer that holds the specified 64-bit integer. + + + + + Create a new big-endian buffer that holds a sequence of the specified 64-bit integers. + + + + + Creates a new single-byte big-endian buffer that holds the specified boolean value. + + + + + Create a new big-endian buffer that holds a sequence of the specified boolean values. + + + + + Creates a new 4-byte big-endian buffer that holds the specified 32-bit floating point number. + + + + + Create a new big-endian buffer that holds a sequence of the specified 32-bit floating point numbers. + + + + + Creates a new 8-byte big-endian buffer that holds the specified 64-bit floating point number. + + + + + Create a new big-endian buffer that holds a sequence of the specified 64-bit floating point numbers. + + + + + Return a unreleasable view on the given {@link ByteBuf} which will just ignore release and retain calls. + + + + + Unpooled implementation of . + + + + Wraps another . + + It's important that the {@link #readerIndex()} and {@link #writerIndex()} will not do any adjustments on the + indices on the fly because of internal optimizations made by {@link ByteBufUtil#writeAscii(ByteBuf, CharSequence)} + and {@link ByteBufUtil#writeUtf8(ByteBuf, CharSequence)}. + +
+
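To make the wrap-versus-copy distinction in the Unpooled documentation above concrete, here is a short sketch against the DotNetty.Buffers assembly shipped under Runtime/csharp-kcp/Plugins (illustrative only, not part of this diff):

using DotNetty.Buffers;

static class UnpooledSketch
{
    static void WrapVersusCopy()
    {
        byte[] backing = { 1, 2, 3, 4 };

        IByteBuffer wrapped = Unpooled.WrappedBuffer(backing); // shares the caller's array
        IByteBuffer copied  = Unpooled.CopiedBuffer(backing);  // takes an independent copy

        backing[0] = 9;
        byte viaWrap = wrapped.GetByte(0); // 9: changes to the array are visible
        byte viaCopy = copied.GetByte(0);  // 1: the copy is unaffected

        wrapped.Release();
        copied.Release();
    }
}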
diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.xml.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.xml.meta new file mode 100644 index 0000000..a87b2b2 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Buffers.xml.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 991956ddfc61ef14ca46d3c8eae721e9 +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll new file mode 100644 index 0000000..422cfd0 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll.mdb b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll.mdb new file mode 100644 index 0000000..843b1ff Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll.mdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll.mdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll.mdb.meta new file mode 100644 index 0000000..f43737e --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll.mdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 70f0a8fa49650a4458ed62e36ce70603 +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll.meta new file mode 100644 index 0000000..44f0b5e --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.dll.meta @@ -0,0 +1,33 @@ +fileFormatVersion: 2 +guid: 7fc98c66abee8d2489b521b5379ef98d +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + Any: + second: + enabled: 1 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.pdb b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.pdb new file mode 100644 index 0000000..32b9762 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.pdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.pdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.pdb.meta new file mode 100644 index 0000000..caffe26 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.pdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 0ae5a74b089e32d4f9e7f3ffe94e6072 +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.xml b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.xml new file mode 100644 index 0000000..2999d6b --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.xml @@ -0,0 +1,646 @@ + + + + DotNetty.Codecs + + + + + http://www.faqs.org/rfcs/rfc3548.html + Table 1: The Base 64 Alphabet + + + + + http://www.faqs.org/rfcs/rfc3548.html + Table 2: The "URL and Filename safe" Base 64 Alphabet + + + + + Cumulates instances of by merging them into one , using memory + copies. + + + + + Cumulate instances of by add them to a and therefore + avoiding memory copy when possible. 
+ + + Be aware that use a more complex indexing implementation so depending on your + use-case + and the decoder implementation this may be slower then just use the . + + + + + Determines whether only one message should be decoded per call. + Default is false as this has performance impacts. + + Is particularly useful in support of protocol upgrade scenarios. + + + + Returns the actual number of readable bytes in the internal cumulative + buffer of this decoder. You usually do not need to rely on this value + to write a decoder. Use it only when you must use it at your own risk. + This method is a shortcut to of . + + + + + An which is thrown by a codec. + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/Adler32.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/CRC32.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/Deflate.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/Deflater.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/GZIPException.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/GZIPHeader.java + + http://www.ietf.org/rfc/rfc1952.txt + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/InfBlocks.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/InfCodes.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/Inflate.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/Inflater.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/InfTree.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/JZlib.java + + + + Creates a new zlib encoder with the specified {@code compressionLevel}, + the specified {@code windowBits}, the specified {@code memLevel}, and + the specified wrapper. + + @param compressionLevel + {@code 1} yields the fastest compression and {@code 9} yields the + best compression. {@code 0} means no compression. The default + compression level is {@code 6}. + @param windowBits + The base two logarithm of the size of the history buffer. The + value should be in the range {@code 9} to {@code 15} inclusive. + Larger values result in better compression at the expense of + memory usage. The default value is {@code 15}. + @param memLevel + How much memory should be allocated for the internal compression + state. {@code 1} uses minimum memory and {@code 9} uses maximum + memory. Larger values result in better and faster compression + at the expense of memory usage. The default value is {@code 8} + + @throws CompressionException if failed to initialize zlib + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/StaticTree.java + + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/Tree.java + + + + Close this {@link ZlibEncoder} and so finish the encoding. + + The returned {@link ChannelFuture} will be notified once the + operation completes. + + + The container file formats that wrap the stream compressed by the DEFLATE + algorithm. + + + The ZLIB wrapper as specified in RFC 1950. + + + The GZIP wrapper as specified in RFC 1952. + + + Raw DEFLATE stream only (no header and no footer). + + + Try {@link #ZLIB} first and then {@link #NONE} if the first attempt fails. 
+ Please note that you can specify this wrapper type only when decompressing. + + + + https://github.com/ymnk/jzlib/blob/master/src/main/java/com/jcraft/jzlib/ZStream.java + + + + Those methods are expected to be override by Inflater and Deflater. + In the future, they will become abstract methods. + + + + A which is thrown when the received frame data could not + be decoded by an inbound handler. + + + + + A decoder that splits the received by one or more + delimiters.It is particularly useful for decoding the frames which ends + with a delimiter such as or + +

Specifying more than one delimiter

+ allows you to specify more than one + delimiter. If more than one delimiter is found in the buffer, it chooses + the delimiter which produces the shortest frame. For example, if you have + the following data in the buffer: + +--------------+ + | ABC\nDEF\r\n | + +--------------+ + a will choose '\n' as the first delimiter and produce two + frames: + +-----+-----+ + | ABC | DEF | + +-----+-----+ + rather than incorrectly choosing '\r\n' as the first delimiter: + +----------+ + | ABC\nDEF | + +----------+ +
+
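A minimal sketch of wiring the delimiter-based decoder described above into a pipeline, assuming the DotNetty port keeps the Netty names (DelimiterBasedFrameDecoder, Delimiters.LineDelimiter, StringDecoder) and the usual ChannelInitializer plumbing from DotNetty.Transport:

using System.Text;
using DotNetty.Codecs;
using DotNetty.Transport.Channels;

sealed class LineProtocolInitializer : ChannelInitializer<IChannel>
{
    protected override void InitChannel(IChannel channel)
    {
        IChannelPipeline pipeline = channel.Pipeline;

        // Split the byte stream on "\n" / "\r\n"; frames over 8192 bytes fail fast.
        pipeline.AddLast("framer",
            new DelimiterBasedFrameDecoder(8192, Delimiters.LineDelimiter()));

        // Hand each frame to downstream handlers as a string.
        pipeline.AddLast("decoder", new StringDecoder(Encoding.UTF8));
    }
}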
+ + Common constructor + + The maximum length of the decoded frame + NOTE: A see is thrown if the length of the frame exceeds this + value. + + whether the decoded frame should strip out the delimiter or not + + If true, a is + thrown as soon as the decoder notices the length of the + frame will exceedmaxFrameLength regardless of + whether the entire frame has been read. + If false, a is + thrown after the entire frame that exceeds maxFrameLength has been read. + + delimiters + + + Returns true if the delimiters are "\n" and "\r\n" + + + ReturnsReturn true if the current instance is a subclass of DelimiterBasedFrameDecoder + + + Create a frame out of the and return it + + the which this + belongs to + + the from which to read data + + the which represent the frame or null if no frame could be + created. + + + + Returns the number of bytes between the readerIndex of the haystack and + the first needle found in the haystack. -1 is returned if no needle is + found in the haystack. + + + Returns a null (0x00) delimiter, which could be used for Flash XML socket or any similar protocols + + + + Returns {@code CR ('\r')} and {@code LF ('\n')} delimiters, which could + be used for text-based line protocols. + + + + + Splits a byte stream of JSON objects and arrays into individual objects/arrays and passes them up the + . + This class does not do any real parsing or validation. A sequence of bytes is considered a JSON object/array + if it contains a matching number of opening and closing braces/brackets. It's up to a subsequent + + to parse the JSON text into a more usable form i.e.a POCO. + + + + + + Create a new instance. + + + The maximum length of the frame. If the length of the frame is + greater than this value then will be thrown. + + The offset of the length field. + The length of the length field. + + + + Create a new instance. + + + The maximum length of the frame. If the length of the frame is + greater than this value then will be thrown. + + The offset of the length field. + The length of the length field. + The compensation value to add to the value of the length field. + the number of first bytes to strip out from the decoded frame. + + + + Create a new instance. + + + The maximum length of the frame. If the length of the frame is + greater than this value then will be thrown. + + The offset of the length field. + The length of the length field. + The compensation value to add to the value of the length field. + the number of first bytes to strip out from the decoded frame. + + If true, a is thrown as soon as the decoder notices the length + of the frame will exceeed regardless of whether the entire frame has been + read. If false, a is thrown after the entire frame that exceeds + has been read. + Defaults to true in other overloads. + + + + + Create a new instance. + + The of the lenght field. + + The maximum length of the frame. If the length of the frame is + greater than this value then will be thrown. + + The offset of the length field. + The length of the length field. + The compensation value to add to the value of the length field. + the number of first bytes to strip out from the decoded frame. + + If true, a is thrown as soon as the decoder notices the length + of the frame will exceeed regardless of whether the entire frame has been + read. If false, a is thrown after the entire frame that exceeds + has been read. + Defaults to true in other overloads. + + + + + Create a frame out of the and return it. + + + The which this belongs + to. + + The from which to read data. 
+ The which represents the frame or null if no frame could be created. + + + + Decodes the specified region of the buffer into an unadjusted frame length. The default implementation is + capable of decoding the specified region into an unsigned 8/16/24/32/64 bit integer. Override this method to + decode the length field encoded differently. + Note that this method must not modify the state of the specified buffer (e.g. + , + , and the content of the buffer.) + + The buffer we'll be extracting the frame length from. + The offset from the absolute . + The length of the framelenght field. Expected: 1, 2, 3, 4, or 8. + The preferred of buffer. + A long integer that represents the unadjusted length of the next frame. + + + + An encoder that prepends the length of the message. The length value is + prepended as a binary form. +

+ For example, {@link LengthFieldPrepender}(2) will encode the
+ following 12-byte string:

+                    +----------------+
+                    | "HELLO, WORLD" |
+                    +----------------+
+                
+ into the following: +
+                    +--------+----------------+
+                    | 0x000C | "HELLO, WORLD" |
+                    +--------+----------------+
+                
+ If you turned on the {@code lengthIncludesLengthFieldLength} flag in the + constructor, the encoded data would look like the following + (12 (original data) + 2 (prepended data) = 14 (0xE)): +
+                    +--------+----------------+
+                    | 0x000E | "HELLO, WORLD" |
+                    +--------+----------------+
+                
+
+
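A hedged sketch pairing the prepender documented above with its inbound counterpart. The LengthFieldBasedFrameDecoder name and its five-argument constructor follow the Netty convention mirrored by the constructor summaries in this file:

using DotNetty.Codecs;
using DotNetty.Transport.Channels;

sealed class LengthFramedInitializer : ChannelInitializer<IChannel>
{
    protected override void InitChannel(IChannel channel)
    {
        IChannelPipeline pipeline = channel.Pipeline;

        // Inbound: read a 2-byte length at offset 0, no adjustment, strip the 2 length bytes.
        pipeline.AddLast("frameDecoder",
            new LengthFieldBasedFrameDecoder(ushort.MaxValue, 0, 2, 0, 2));

        // Outbound: prepend a 2-byte length, so "HELLO, WORLD" goes out as 0x000C + payload.
        pipeline.AddLast("frameEncoder", new LengthFieldPrepender(2));
    }
}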
+ + + Creates a new instance. + + + The length of the prepended length field. + Only 1, 2, 3, 4, and 8 are allowed. + + + + + Creates a new instance. + + + The length of the prepended length field. + Only 1, 2, 3, 4, and 8 are allowed. + + + If true, the length of the prepended length field is added + to the value of the prepended length field. + + + + + Creates a new instance. + + + The length of the prepended length field. + Only 1, 2, 3, 4, and 8 are allowed. + + The compensation value to add to the value of the length field. + + + + Creates a new instance. + + + The length of the prepended length field. + Only 1, 2, 3, 4, and 8 are allowed. + + + If true, the length of the prepended length field is added + to the value of the prepended length field. + + The compensation value to add to the value of the length field. + + + + Creates a new instance. + + The of the length field. + + The length of the prepended length field. + Only 1, 2, 3, 4, and 8 are allowed. + + + If true, the length of the prepended length field is added + to the value of the prepended length field. + + The compensation value to add to the value of the length field. + + + + A decoder that splits the received {@link ByteBuf}s on line endings. + Both {@code "\n"} and {@code "\r\n"} are handled. + For a more general delimiter-based decoder, see {@link DelimiterBasedFrameDecoder}. + + + + Maximum length of a frame we're willing to decode. + + + Whether or not to throw an exception as soon as we exceed maxLength. + + + True if we're discarding input because we're already over maxLength. + + + + Initializes a new instance of the class. + + + the maximum length of the decoded frame. + A {@link TooLongFrameException} is thrown if + the length of the frame exceeds this value. + + + + + Initializes a new instance of the class. + + + the maximum length of the decoded frame. + A {@link TooLongFrameException} is thrown if + the length of the frame exceeds this value. + + + whether the decoded frame should strip out the + delimiter or not + + + If true, a {@link TooLongFrameException} is + thrown as soon as the decoder notices the length of the + frame will exceed maxFrameLength regardless of + whether the entire frame has been read. + If false, a {@link TooLongFrameException} is + thrown after the entire frame that exceeds + maxFrameLength has been read. + + + + + Create a frame out of the {@link ByteBuf} and return it. + + the {@link ChannelHandlerContext} which this {@link ByteToMessageDecoder} belongs to + the {@link ByteBuf} from which to read data + + + + + An abstract that aggregates a series of message objects + into a single aggregated message. + 'A series of messages' is composed of the following: + a single start message which optionally contains the first part of the content, and + 1 or more content messages. The content of the aggregated message will be the merged + content of the start message and its following content messages. If this aggregator + encounters a content message where { @link #isLastContentMessage(ByteBufHolder)} + return true for, the aggregator will finish the aggregation and produce the aggregated + message and expect another start message. + + The type that covers both start message and content message + The type of the start message + The type of the content message + The type of the aggregated message + + + + Message to message decoder. + + + + + Decode from one message to an other. This method will be called for each written message that can be handled + by this encoder. 
+ + the {@link ChannelHandlerContext} which this {@link MessageToMessageDecoder} belongs to + the message to decode to an other one + the {@link List} to which decoded messages should be added + + + + Returns {@code true} if the given message should be handled. If {@code false} it will be passed to the next + {@link ChannelHandler} in the {@link ChannelPipeline}. + + + + + Encode from one message to an other. This method will be called for each written message that can be handled + by this encoder. + @param context the {@link ChannelHandlerContext} which this {@link MessageToMessageEncoder} belongs to + @param message the message to encode to an other one + @param output the {@link List} into which the encoded message should be added + needs to do some kind of aggragation + @throws Exception is thrown if an error accour + + + + + A decoder that splits the received {@link ByteBuf}s dynamically by the + value of the Google Protocol Buffers + http://code.google.com/apis/protocolbuffers/docs/encoding.html#varints + Base 128 Varints integer length field in the message. + For example: + + BEFORE DECODE (302 bytes) AFTER DECODE (300 bytes) + +--------+---------------+ +---------------+ + | Length | Protobuf Data |----->| Protobuf Data | + | 0xAC02 | (300 bytes) | | (300 bytes) | + +--------+---------------+ +---------------+ + + + + + An encoder that prepends the the Google Protocol Buffers + http://code.google.com/apis/protocolbuffers/docs/encoding.html#varints + Base 128 Varints integer length field. + For example: + + BEFORE ENCODE (300 bytes) AFTER ENCODE (302 bytes) + +---------------+ +--------+---------------+ + | Protobuf Data |-------------->| Length | Protobuf Data | + | (300 bytes) | | 0xAC02 | (300 bytes) | + +---------------+ +--------+---------------+ + + + + Initializes a new instance of the class with the current system + character set. + + + + + Initializes a new instance of the class with the specified character + set.. + + Encoding. + + + Encodes the requested {@link String} into a {@link ByteBuf}. + A typical setup for a text-based line protocol in a TCP/IP socket would be: +
+             {@link ChannelPipeline} pipeline = ...;
+            
+             // Decoders
+             pipeline.addLast("frameDecoder", new {@link LineBasedFrameDecoder}(80));
+             pipeline.addLast("stringDecoder", new {@link StringDecoder}(CharsetUtil.UTF_8));
+            
+             // Encoder
+             pipeline.addLast("stringEncoder", new {@link StringEncoder}(CharsetUtil.UTF_8));
+             
+ and then you can use a {@link String} instead of a {@link ByteBuf} + as a message: +
+             void channelRead({@link ChannelHandlerContext} ctx, {@link String} msg) {
+                 ch.write("Did you say '" + msg + "'?\n");
+             }
+             
+
+ + + Initializes a new instance of the class with the current system + character set. + + + + + Initializes a new instance of the class with the specified character + set.. + + Encoding. + + + + A which is thrown when the length of the frame + decoded is greater than the allowed maximum. + + + + + Thrown if an unsupported message is received by an codec. + + +
+
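One more hedged sketch before the next plugin files, covering the Base-128 varint framing pair documented in this codec assembly. The ProtobufVarint32FrameDecoder / ProtobufVarint32LengthFieldPrepender names and the DotNetty.Codecs.Protobuf namespace are assumed from the upstream Netty layout and should be checked against the bundled DLL:

using DotNetty.Codecs.Protobuf;
using DotNetty.Transport.Channels;

sealed class VarintFramedInitializer : ChannelInitializer<IChannel>
{
    protected override void InitChannel(IChannel channel)
    {
        IChannelPipeline pipeline = channel.Pipeline;

        // Inbound: strip the varint prefix (e.g. 0xAC02 + 300 bytes -> 300-byte frame).
        pipeline.AddLast(new ProtobufVarint32FrameDecoder());

        // Outbound: prepend the varint length before each serialized message.
        pipeline.AddLast(new ProtobufVarint32LengthFieldPrepender());

        // A protobuf serializer/deserializer pair would normally follow here.
    }
}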
diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.xml.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.xml.meta new file mode 100644 index 0000000..bfd9f27 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Codecs.xml.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: cf54e07b609f28c4c9ed1409ba230ad6 +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll b/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll new file mode 100644 index 0000000..5a7e78f Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll.mdb b/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll.mdb new file mode 100644 index 0000000..8ffc450 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll.mdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll.mdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll.mdb.meta new file mode 100644 index 0000000..a29dcba --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll.mdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 206957aa7baaebe49899acc8f0e8c89a +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll.meta new file mode 100644 index 0000000..fc25e08 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Common.dll.meta @@ -0,0 +1,33 @@ +fileFormatVersion: 2 +guid: 1a268e2cd81ae1240a54deedb4231b26 +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + Any: + second: + enabled: 1 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Common.pdb b/Runtime/csharp-kcp/Plugins/DotNetty.Common.pdb new file mode 100644 index 0000000..59126ed Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Common.pdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Common.pdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Common.pdb.meta new file mode 100644 index 0000000..a524ee8 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Common.pdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: a21e213b5e1e0684cb3e00a30252902f +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Common.xml b/Runtime/csharp-kcp/Plugins/DotNetty.Common.xml new file mode 100644 index 0000000..f7be6b7 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Common.xml @@ -0,0 +1,2149 @@ + + + + DotNetty.Common + + + + + Abstract base class for implementations + + + + Creates an instance of . + + + Creates an instance of . + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Abstract base class for s that need to support scheduling. + + + + + Cancel all scheduled tasks + This method MUST be called only when is true. + + + + + Parent . 
+ + + + + Returns true if the current belongs to this event loop, + false otherwise. + + + It is a convenient way to determine whether code can be executed directly or if it + should be posted for execution to this executor instance explicitly to ensure execution in the loop. + + + + + Returns true if the given belongs to this event loop, + false> otherwise. + + + + + Provides an access to a set of s it manages. + + + + + Returns list of owned event executors. + + + + + Returns true if and only if this executor is being shut down via . + + + + + Terminates this and all its s. + + for completion of termination. + + + + Terminates this and all its s. + + for completion of termination. + + + + A for completion of termination. . + + + + + Returns . + + + + + Executes the given task. + + Threading specifics are determined by IEventExecutor implementation. + + + + Executes the given action. + + + parameter is useful to when repeated execution of an action against + different objects is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Executes the given . + + Threading specifics are determined by IEventExecutor implementation. + + + + Executes the given action. + + + and parameters are useful when repeated execution of + an action against different objects in different context is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Returns true if this executor has been shut down, false otherwise. + + + + + Returns true if all tasks have completed following shut down. + + + Note that is never true unless was called first. + + + + + Executes the given function and returns indicating completion status and result of + execution. + + + Threading specifics are determined by IEventExecutor implementation. + + + + + Executes the given action and returns indicating completion status and result of execution. + + + Threading specifics are determined by IEventExecutor implementation. + + + + + Executes the given action and returns indicating completion status and result of execution. + + + parameter is useful to when repeated execution of an action against + different objects is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Executes the given action and returns indicating completion status and result of execution. + + + parameter is useful to when repeated execution of an action against + different objects is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Executes the given action and returns indicating completion status and result of execution. + + + and parameters are useful when repeated execution of + an action against different objects in different context is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Executes the given action and returns indicating completion status and result of execution. + + + and parameters are useful when repeated execution of + an action against different objects in different context is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Creates and executes a one-shot action that becomes enabled after the given delay. + + the task to execute + the time from now to delay execution + an representing pending completion of the task. + + + + Schedules the given action for execution after the specified delay would pass. + + + Threading specifics are determined by IEventExecutor implementation. 
+ + + + + Schedules the given action for execution after the specified delay would pass. + + + parameter is useful to when repeated execution of an action against + different objects is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Schedules the given action for execution after the specified delay would pass. + + + and parameters are useful when repeated execution of + an action against different objects in different context is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Schedules the given action for execution at a fixed frequency after the specified delay would pass. + + + + + + + + + Schedules the given action for execution at a fixed frequency after the specified delay would pass. + + + + + + + + + Schedules the given action for execution at a fixed delay after the specified delay would pass. + + + + + + + + + Schedules the given action for execution at a fixed delay after the specified delay would pass. + + + + + + + + + Schedules the given action for execution after the specified delay would pass. + + + parameter is useful to when repeated execution of an action against + different objects is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Schedules the given action for execution after the specified delay would pass. + + + parameter is useful to when repeated execution of an action against + different objects is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Schedules the given action for execution after the specified delay would pass. + + + Threading specifics are determined by IEventExecutor implementation. + + + + + Schedules the given action for execution after the specified delay would pass. + + + Threading specifics are determined by IEventExecutor implementation. + + + + + Schedules the given action for execution after the specified delay would pass. + + + and parameters are useful when repeated execution of + an action against different objects in different context is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Schedules the given action for execution after the specified delay would pass. + + + and parameters are useful when repeated execution of + an action against different objects in different context is needed. + Threading specifics are determined by IEventExecutor implementation. + + + + + Reuse ReusableScheduledTask to optimize GC. + + + + + backed by a single thread. + + + + Creates a new instance of . + + + Creates a new instance of . + + + + Task Scheduler that will post work to this executor's queue. + + + + + Allows to track whether executor is progressing through its backlog. Useful for diagnosing / mitigating stalls due to blocking calls in conjunction with IsBacklogEmpty property. + + + + + Indicates whether executor's backlog is empty. Useful for diagnosing / mitigating stalls due to blocking calls in conjunction with Progress property. + + + + + Gets length of backlog of tasks queued for immediate execution. + + + + + + + + + + + + + + + + + + + + + + + Adds an which will be executed on shutdown of this instance. + + The to run on shutdown. + + + + Removes a previously added from the collection of s which will be + executed on shutdown of this instance. + + The to remove. + + + + + + + Removes all variables bound to the current thread. 
This operation is useful when you + are in a container environment, and you don't want to leave the thread local variables in the threads you do not + manage. + + + + + Destroys the data structure that keeps all variables accessed from + non-s. This operation is useful when you are in a container environment, and + you do not want to leave the thread local variables in the threads you do not manage. Call this method when + your application is being unloaded from the container. + + + + + Sets the value to uninitialized; a proceeding call to get() will trigger a call to GetInitialValue(). + + + + + + Returns the number of thread local variables bound to the current thread. + + + + + Gets or sets current value for the current thread. + + + + + Returns the current value for the specified thread local map. + The specified thread local map must be for the current thread. + + + + + Set the value for the specified thread local map. The specified thread local map must be for the current thread. + + + + + Returns true if and only if this thread-local variable is set. + + + + + Returns true if and only if this thread-local variable is set. + The specified thread local map must be for the current thread. + + + + + Returns the initial value for this thread-local variable. + + + + + Sets the value to uninitialized for the specified thread local map; + a proceeding call to will trigger a call to . + The specified thread local map must be for the current thread. + + + The from which this should be removed. + + + + + Invoked when this thread local variable is removed by . + + + + + The internal data structure that stores the thread-local variables for DotNetty and all + s. Note that this class is for internal use only and is subject to change at any + time. Use unless you know what you are doing. + + + + + Used by . + + + + + Sets a value at the given index in this . + + The desired index at which a value should be set. + The value to set at the given index. + true if and only if a new thread-local variable has been created. + + + + Resets the . Be aware this will only reset the current internal + position and not shrink the internal char array. + + + + + Create a new ascii string, this method assumes all chars has been sanitized to ascii chars when appending + to the array. + + + + Forked from + JCTools + . + A concurrent access enabling class used by circular array based queues this class exposes an offset computation + method along with differently memory fenced load/store methods into the underlying array. The class is pre-padded and + the array is padded on either side to help with False sharing prvention. It is expected theat subclasses handle post + padding. +

+ Offset calculation is separate from access to enable the reuse of a given computed offset.

+ Load/Store methods using a + buffer + parameter are provided to allow the prevention of field reload after a + LoadLoad barrier. +

+ + +

+ Calculates an element offset based on a given array index. + + The desirable element index. + The offset in bytes within the array for a given index. +
+ + + A plain store (no ordering/fences) of an element to a given offset. + + Computed via . + A kitty. + + + + An ordered store(store + StoreStore barrier) of an element to a given offset. + + Computed via . + An orderly kitty. + + + + A plain load (no ordering/fences) of an element from a given offset. + + Computed via . + The element at the offset. + + + + A volatile load (load + LoadLoad barrier) of an element from a given offset. + + Computed via . + The element at the offset. + + + + A skeletal implementation of . This class implements + all methods that have a parameter by default to call + specific logger methods such as or . + + + + + Creates a new instance. + + A friendly name for the new logger instance. + + + + Holds the results of formatting done by . + + + + + Internal-use-only logger used by DotNetty. DO NOT + access this class outside of DotNetty. + + + + + Return the name of this instance. + + name of this logger instance + + + + Is this logger instance enabled for the TRACE level? + + true if this Logger is enabled for level TRACE, false otherwise. + + + + Log a message object at level TRACE. + + the message object to be logged + + + + Log a message at level TRACE according to the specified format and + argument. + + This form avoids superfluous object creation when the logger is disabled + for level TRACE. + + + the format string + the argument + + + + Log a message at level TRACE according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level TRACE. + + + the format string + the first argument + the second argument + + + + Log a message at level TRACE according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level TRACE. However, this variant incurs the hidden + (and relatively small) cost of creating an object[] + before invoking the method, + even if this logger is disabled for TRACE. The variants + and + arguments exist solely to avoid this hidden cost. + + + the format string + an array of arguments + + + + Log an exception at level TRACE with an accompanying message. + + the message accompanying the exception + the exception to log + + + + Log an exception at level TRACE. + + the exception to log + + + + Is this logger instance enabled for the DEBUG level? + + true if this Logger is enabled for level DEBUG, false otherwise. + + + + Log a message object at level DEBUG. + + the message object to be logged + + + + Log a message at level DEBUG according to the specified format and + argument. + + This form avoids superfluous object creation when the logger is disabled + for level DEBUG. + + + the format string + the argument + + + + Log a message at level DEBUG according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level DEBUG. + + + the format string + the first argument + the second argument + + + + Log a message at level DEBUG according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level DEBUG. However, this variant incurs the hidden + (and relatively small) cost of creating an object[] + before invoking the method, + even if this logger is disabled for DEBUG. The variants + and + arguments exist solely to avoid this hidden cost. + + + the format string + an array of arguments + + + + Log an exception at level DEBUG with an accompanying message. 
+ + the message accompanying the exception + the exception to log + + + + Log an exception at level DEBUG. + + the exception to log + + + + Is this logger instance enabled for the INFO level? + + true if this Logger is enabled for level INFO, false otherwise. + + + + Log a message object at level INFO. + + the message object to be logged + + + + Log a message at level INFO according to the specified format and + argument. + + This form avoids superfluous object creation when the logger is disabled + for level INFO. + + + the format string + the argument + + + + Log a message at level INFO according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level INFO. + + + the format string + the first argument + the second argument + + + + Log a message at level INFO according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level INFO. However, this variant incurs the hidden + (and relatively small) cost of creating an object[] + before invoking the method, + even if this logger is disabled for INFO. The variants + and + arguments exist solely to avoid this hidden cost. + + + the format string + an array of arguments + + + + Log an exception at level INFO with an accompanying message. + + the message accompanying the exception + the exception to log + + + + Log an exception at level INFO. + + the exception to log + + + + Is this logger instance enabled for the WARN level? + + true if this Logger is enabled for level WARN, false otherwise. + + + + Log a message object at level WARN. + + the message object to be logged + + + + Log a message at level WARN according to the specified format and + argument. + + This form avoids superfluous object creation when the logger is disabled + for level WARN. + + + the format string + the argument + + + + Log a message at level WARN according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level WARN. However, this variant incurs the hidden + (and relatively small) cost of creating an object[] + before invoking the method, + even if this logger is disabled for WARN. The variants + and + arguments exist solely to avoid this hidden cost. + + + the format string + an array of arguments + + + + Log a message at level WARN according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level WARN. + + + the format string + the first argument + the second argument + + + + Log an exception at level WARN with an accompanying message. + + the message accompanying the exception + the exception to log + + + + Log an exception at level WARN. + + the exception to log + + + + Is this logger instance enabled for the ERROR level? + + true if this Logger is enabled for level ERROR, false otherwise. + + + + Log a message object at level ERROR. + + the message object to be logged + + + + Log a message at level ERROR according to the specified format and + argument. + + This form avoids superfluous object creation when the logger is disabled + for level ERROR. + + + the format string + the argument + + + + Log a message at level ERROR according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level ERROR. 
+ + + the format string + the first argument + the second argument + + + + Log a message at level ERROR according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for level ERROR. However, this variant incurs the hidden + (and relatively small) cost of creating an object[] + before invoking the method, + even if this logger is disabled for ERROR. The variants + and + arguments exist solely to avoid this hidden cost. + + + the format string + an array of arguments + + + + Log an exception at level ERROR with an accompanying message. + + the message accompanying the exception + the exception to log + + + + Log an exception at level ERROR. + + the exception to log + + + + Is the logger instance enabled for the specified ? + + log level + true if this Logger is enabled for the specified , false otherwise. + + + + Log a message object at a specified . + + log level + the message object to be logged + + + + Log a message at a specified according to the specified format and + argument. + + This form avoids superfluous object creation when the logger is disabled + for the specified . + + + log level + the format string + the argument + + + + Log a message at a specified according to the specified format and + arguments. + + This form avoids superfluous object creation when the logger is disabled + for the specified . + + + log level + the format string + the first argument + the second argument + + + + Log a message at the specified according to the specified format + and arguments. + + This form avoids superfluous string concatenation when the logger + is disabled for the specified . However, this variant incurs the hidden + (and relatively small) cost of creating an object[] before invoking the method, + even if this logger is disabled for the specified . The variants + and + arguments exist solely + in order to avoid this hidden cost. + + + log level + the format string + an array of arguments + + + + Log an exception at the specified with an + accompanying message. + + log level + the message accompanying the exception + the exception to log + + + + Log an exception at the specified . + + log level + the exception to log + + + + Creates an or changes the default factory + implementation. This factory allows you to choose what logging framework + DotNetty should use. The default factory is own with registered. + You can change it to your preferred logging framework before other DotNetty classes are loaded: +
+                    InternalLoggerFactory.DefaultFactory = new LoggerFactory();
+                
+ Please note that the new default factory is effective only for the classes
+ which were loaded after the default factory is changed. Therefore, it should be set as early
+ as possible and should not be called more than once.
+
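A small usage sketch for the logging factory configured above. GetInstance<T>() and the level-check properties are assumed to match the member summaries in this file, and the "{}" placeholders are the MessageFormatter anchors documented next:

using DotNetty.Common.Internal.Logging;

sealed class LoggingSketch
{
    static readonly IInternalLogger Logger = InternalLoggerFactory.GetInstance<LoggingSketch>();

    public void Announce(string peer)
    {
        // "{}" is the formatting anchor substituted at log time.
        if (Logger.InfoEnabled)
        {
            Logger.Info("Connected to {}.", peer);
        }
    }
}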
+ + + Creates a new logger instance with the name of the specified type. + + type where logger is used + logger instance + + + + Creates a new logger instance with the name of the specified type. + + type where logger is used + logger instance + + + + Creates a new logger instance with the specified name. + + logger name + logger instance + + + + The log level that can log at. + + + + + 'TRACE' log level. + + + + + 'DEBUG' log level. + + + + + 'INFO' log level. + + + + + 'WARN' log level. + + + + + 'ERROR' log level. + + + + + Formats messages according to very simple substitution rules. Substitutions can be made 1, 2 or more arguments. + For example, + + MessageFormatter.Format("Hi {}.", "there") + + + will return the string "Hi there.". + + + The {} pair is called the formatting anchor. It serves to designate the location where arguments need + to be substituted within the message pattern. + + + In case your message contains the '{' or the '}' character, you do not have to do anything special unless the + '}' character immediately follows '{'. For example, + + + MessageFormatter.Format("Set {1,2,3} is not equal to {}.", "1,2"); + + + will return the string "Set {1,2,3} is not equal to 1,2.". + + + If for whatever reason you need to place the string "{}" in the message without its formatting anchor + meaning, then you need to escape the '{' character with '\', that is the backslash character. Only the '{' + character should be escaped. There is no need to escape the '}' character. For example, + + + MessageFormatter.Format("Set \\{} is not equal to {}.", "1,2"); + + + will return the string "Set {} is not equal to 1,2.". + + + The escaping behavior just described can be overridden by escaping the escape character '\'. Calling + + + MessageFormatter.Format("File name is C:\\\\{}.", "file.zip"); + + + will return the string "File name is C:\file.zip". + + + + + + + + + Performs single argument substitution for the given . + + For example, + + + MessageFormatter.Format("Hi {}.", "there"); + + + will return the string "Hi there.". + + + The message pattern which will be parsed and formatted + The argument to be substituted in place of the formatting anchor + The formatted message + + + + Performs a two argument substitution for the given . + + For example, + + + MessageFormatter.Format("Hi {}. My name is {}.", "Alice", "Bob"); + + + will return the string "Hi Alice. My name is Bob.". + + + The message pattern which will be parsed and formatted + The argument to be substituted in place of the first formatting anchor + The argument to be substituted in place of the second formatting anchor + The formatted message + + + + Same principle as the and + methods, except that any number of arguments can be passed in an array. + + The message pattern which will be parsed and formatted + An array of arguments to be substituted in place of formatting anchors + The formatted message + + + Length of a valid MAC address. + + + Obtains the best MAC address found on local network interfaces. + Generally speaking, an active network interface used on public + networks is better than a local network interface. + byte array containing a MAC. null if no MAC can be found. + + + byte array of a MAC address. + hex formatted MAC address. + + + positive - current is better, 0 - cannot tell from MAC addr, negative - candidate is better. + + + positive - current is better, 0 - cannot tell, negative - candidate is better + + + + Forked from JCTools. + A Multi-Producer-Single-Consumer queue based on a . 
This implies + that any thread may call the Enqueue methods, but only a single thread may call poll/peek for correctness to + maintained. + + This implementation follows patterns documented on the package level for False Sharing protection. + + + This implementation is using the Fast Flow + method for polling from the queue (with minor change to correctly publish the index) and an extension of + the Leslie Lamport concurrent queue algorithm (originated by Martin Thompson) on the producer side. + + + The type of each item in the queue. + + + + Lock free Enqueue operation, using a single compare-and-swap. As the class name suggests, access is + permitted to many threads concurrently. + + The item to enqueue. + true if the item was added successfully, otherwise false. + + + + + A wait-free alternative to , which fails on compare-and-swap failure. + + The item to enqueue. + 1 if next element cannot be filled, -1 if CAS failed, and 0 if successful. + + + + Lock free poll using ordered loads/stores. As class name suggests, access is limited to a single thread. + + The dequeued item. + true if an item was retrieved, otherwise false. + + + + + Lock free peek using ordered loads. As class name suggests access is limited to a single thread. + + The peeked item. + true if an item was retrieved, otherwise false. + + + + + Returns the number of items in this . + + + + + A plain store (no ordering/fences) of an element to a given offset. + + The element type. + The source buffer. + Computed via + An orderly kitty. + + + + An ordered store(store + StoreStore barrier) of an element to a given offset. + + The element type. + The source buffer. + Computed via + + + + + A plain load (no ordering/fences) of an element from a given offset. + + The element type. + The source buffer. + Computed via + The element at the given in the given . + + + + A volatile load (load + LoadLoad barrier) of an element from a given offset. + + The element type. + The source buffer. + Computed via + The element at the given in the given . + + + + Gets the offset in bytes within the array for a given index. + + The desired element index. + Mask for the index. + The offset (in bytes) within the array for a given index. + + + + Called from a producer thread subject to the restrictions appropriate to the implementation and + according to the interface. + + The element to enqueue. + true if the element was inserted, false iff the queue is full. + + + + Called from the consumer thread subject to the restrictions appropriate to the implementation and + according to the interface. + + A message from the queue if one is available, null iff the queue is empty. + + + + Called from the consumer thread subject to the restrictions appropriate to the implementation and + according to the interface. + + A message from the queue if one is available, null iff the queue is empty. + + + + A collection of utility methods to retrieve and parse the values of the system properties (Environment variables). + + + + + Returns true if and only if the system property with the specified key + exists. + + + + + Returns the value of the system property with the specified + key, while falling back to null if the property access fails. + + the property value or null + + + + Returns the value of the system property with the specified + key, while falling back to the specified default value if + the property access fails. + + + the property value. + def if there's no such property or if an access to the + specified property is not allowed. 
+ + + + + Returns the value of the system property with the specified + key, while falling back to the specified default value if + the property access fails. + + + the property value or def if there's no such property or + if an access to the specified property is not allowed. + + + + + Returns the value of the system property with the specified + key, while falling back to the specified default value if + the property access fails. + + + the property value. + def if there's no such property or if an access to the + specified property is not allowed. + + + + + Returns the value of the system property with the specified + key, while falling back to the specified default value if + the property access fails. + + + the property value. + def if there's no such property or if an access to the + specified property is not allowed. + + + + + Reference counting interface for reusable objects + + + + + Returns the reference count of this object + + + + + Increases the reference count by 1 + + + + + Increases the reference count by + + + + + Records the current access location of this object for debugging purposes. + If this object is determined to be leaked, the information recorded by this operation will be provided to you + via . This method is a shortcut to with null as + an argument. + + + + + + Records the current access location of this object with an additonal arbitrary information for debugging + purposes. If this object is determined to be leaked, the information recorded by this operation will be + provided to you via . + + + + + Decreases the reference count by 1 and deallocates this object if the reference count reaches 0. + + true if and only if the reference count is 0 and this object has been deallocated + + + + Decreases the reference count by and deallocates this object if the reference count + reaches 0. + + true if and only if the reference count is 0 and this object has been deallocated + + + + A hint object that provides human-readable message for easier resource leak tracking. + + + + + Returns a human-readable message that potentially enables easier resource leak tracking. + + + + + + Records the caller's current stack trace so that the can tell where the + leaked + resource was accessed lastly. This method is a shortcut to with null as an + argument. + + + + + Records the caller's current stack trace and the specified additional arbitrary information + so that the can tell where the leaked resource was accessed lastly. + + + + + + Close the leak so that does not warn about leaked resources. + + true if called first time, false if called already + + + + Represents the level of resource leak detection. + + + + + Disables resource leak detection. + + + + + Enables simplistic sampling resource leak detection which reports there is a leak or not, + at the cost of small overhead (default). + + + + + Enables advanced sampling resource leak detection which reports where the leaked object was accessed + recently at the cost of high overhead. + + + + + Enables paranoid resource leak detection which reports where the leaked object was accessed recently, + at the cost of the highest possible overhead (for testing purposes only). + + + + Returns true if resource leak detection is enabled. + + + + Gets or sets resource leak detection level + + + + + Creates a new which is expected to be closed + when the + related resource is deallocated. + + the or null + + + + Schedules the specified to run when the specified dies. + + + + + Cancels the task scheduled via . 
+ + + + + Waits until the thread of this watcher has no threads to watch and terminates itself. + Because a new watcher thread will be started again on , + this operation is only useful when you want to ensure that the watcher thread is terminated + after your application is shut down and there's no chance of calling + afterwards. + + + true if and only if the watcher thread has been terminated. + + + + Allocate a new or return null if not possible. + + + + Base implementation of . + + + Creates a new instance. + + + + Extension methods used for slicing byte arrays + + + + + Merge the byte arrays into one byte array. + + + + + During normal use cases the AsciiString should be immutable, but if the + underlying array is shared, and changes then this needs to be called. + + + + + Implementation of the java.concurrent.util AtomicReference type. + + + + + Sets the initial value of this to . + + + + + Default constructor + + + + + The current value of this + + + + + If equals , then set the Value to + + Returns true if was set, false otherwise. + + + + + Implicit conversion operator = automatically casts the to an instance of + + + + + + Implicit conversion operator = allows us to cast any type directly into a + instance. + + + + + + + Key which can be used to access out of the . Be aware that + it is not be possible to have multiple keys with the same name. + + + the type of the which can be accessed via this . + + + + Returns the singleton instance of the {@link AttributeKey} which has the specified name. + + + Returns true if a exists for the given name. + + + + Creates a new for the given name or fail with an + if a for the given name exists. + + + + + Provides a mechanism to iterate over a collection of bytes. + + + + + Aborts on a NUL (0x00). + + + + + Aborts on a non-NUL (0x00). + + + + + Aborts on a CR ('\r'). + + + + + Aborts on a non-CR ('\r'). + + + + + Aborts on a LF ('\n'). + + + + + Aborts on a non-LF ('\n'). + + + + + Aborts on a CR (';'). + + + + + Aborts on a comma (','). + + + + + Aborts on a ascii space character (' '). + + + + + Aborts on a CR ('\r') or a LF ('\n'). + + + + + Aborts on a byte which is neither a CR ('\r') nor a LF ('\n'). + + + + + Aborts on a linear whitespace (a ' ' or a '\t'). + + + + + Aborts on a byte which is not a linear whitespace (neither ' ' nor '\t'). + + + + + A pool of s. + + + + Shortcut of this.ValueOf(firstNameComponent.Name + "#" + secondNameComponent). + + + + Returns the which is assigned to the specified name. + If there's no such , a new one will be created and returned. + Once created, the subsequent calls with the same name will always return the previously created one + (i.e. singleton.) + + the name of the + + + Returns true if a exists for the given name. + + + + Creates a new for the given name or fail with an + if a for the given name exists. + + + + + Default implementation which use simple synchronization per bucket to keep the memory + overhead + as low as possible. + + + + + Creates a new timer. + + the interval between two consecutive ticks + the size of the wheel + The maximum number of pending timeouts after which call to + newTimeout will result in being thrown. + No maximum pending timeouts limit is assumed if this value is 0 or negative. + if either of tickInterval and ticksPerWheel is <= 0 + + + + Starts the background thread explicitly. The background thread will + start automatically on demand even if you did not call this method. + + if this timer has been + stopped already. 
+ + + + calculate timer firing time from startTime and current tick number, + then wait until that goal has been reached. + + long.MinValue if received a shutdown request, + current time otherwise (with long.MinValue changed by +1) + + + + + Bucket that stores HashedWheelTimeouts. These are stored in a linked-list like datastructure to allow easy + removal of HashedWheelTimeouts in the middle. Also the HashedWheelTimeout act as nodes themself and so no + extra object creation is needed. + + + + + Add a to this bucket. + + + + + Expire all s for the given deadline. + + + + + Clear this bucket and return all not expired / cancelled s. + + + + An attribute which allows to store a value reference. It may be updated atomically and so is thread-safe. + the type of the value it holds. + + + + Returns the key of this attribute. + + + + + Returns the current value, which may be null + + + + + Sets the value + + + + + Atomically sets to the given value and returns the old value which may be null if non was set before. + + + + + Atomically sets to the given value if this 's value is null. + If it was not possible to set the value as it contains a value it will just return the current value. + + + + + Removes this attribute from the and returns the old value. Subsequent + + calls will return null. + If you only want to return the old value and clear the while still keep it in + use with a value of null. + + + + + Atomically sets the value to the given updated value if the current value == the expected value. + If it the set was successful it returns true otherwise false. + + + + + Removes this attribute from the . Subsequent calls will return + null. + If you only want to remove the value and clear the while still keep it in + use with a value of null. + + + + Holds s which can be accessed via . + Implementations must be Thread-safe. + + + + Get the for the given . This method will never return + null, but may return an which does not have a value set yet. + + + + + Returns true if and only if the given exists in this + . + + + + Start is the inclusive start index to begin the subsequence. + End is the exclusive end index to end the subsequence. + + + + A singleton which is safe to compare via the == operator. Created and managed by + . + + + + Returns the unique number assigned to this . + + + Returns the name of this . + + + + + Exception thrown during instances where a reference count is used incorrectly + + + + + A handle associated with a that is returned by a + . + + + + + Returns the that created this handle. + + + + + Returns the which is associated with this handle. + + + + + Returns true if and only if the associated + with this handle has been expired. + + + + + Returns true if and only if the associated + with this handle has been canceled. + + + + + Attempts to cancel the associated with this handle. + If the task has been executed or canceled already, it will return with + no side effect. + + true if the cancellation completed successfully, otherwise false. + + + + Schedules s for one-time future execution in a background + thread. + + + + + Schedules the specified for one-time execution after the specified delay. + + a handle which is associated with the specified task + if this timer has been stopped already + if the pending timeouts are too many and creating new timeout + can cause instability in the system. + + + + Releases all resources acquired by this and cancels all + tasks which were scheduled but not executed yet. 
+ + the handles associated with the tasks which were canceled by + this method + + + + A task which is executed after the delay specified with + . + + + + + Executed after the delay specified with + . + + a handle which is associated with this task + + + + Tries to call if the specified message implements + . If the specified message doesn't implement + , this method does nothing. + + + + + Tries to call if the specified message implements + . If the specified message doesn't implement + , this method does nothing. + + + + + Tries to call if the specified message implements + . + If the specified message doesn't implement , this method does nothing. + + + + + Tries to call if the specified message implements + . If the specified message doesn't implement + , this method does nothing. + + + + + Tries to call if the specified message implements + . If the specified message doesn't implement + , this method does nothing. + + + + + Tries to call if the specified message implements + . If the specified message doesn't implement + , this method does nothing. + + + + + Tries to call if the specified message implements + . If the specified message doesn't implement + , this method does nothing. Unlike , this + method catches an exception raised by and logs it, rather than + rethrowing it to the caller. It is usually recommended to use instead, unless + you absolutely need to swallow an exception. + + + + + Tries to call if the specified message implements + . If the specified message doesn't implement + , this method does nothing. Unlike , this + method catches an exception raised by and logs it, rather + than rethrowing it to the caller. It is usually recommended to use + instead, unless you absolutely need to swallow an exception. + + + + + Schedules the specified object to be released when the caller thread terminates. Note that this operation + is intended to simplify reference counting of ephemeral objects during unit tests. Do not use it beyond the + intended use case. + + + + + Schedules the specified object to be released when the caller thread terminates. Note that this operation + is intended to simplify reference counting of ephemeral objects during unit tests. Do not use it beyond the + intended use case. + + + + + String utility class. + + + + 2 - Quote character at beginning and end. + 5 - Extra allowance for anticipated escape characters that may be added. + + + + Converts the specified byte value into a 2-digit hexadecimal integer. + + + + + Converts the specified byte array into a hexadecimal value. + + + + + Converts the specified byte array into a hexadecimal value. + + + + + Converts the specified byte value into a hexadecimal integer. + + + + + The shortcut to SimpleClassName(o.GetType()). + + + + + The shortcut to SimpleClassName(o.GetType()). + + + + + Generates a simplified name from a . Similar to {@link Class#getSimpleName()}, but it works + fine + with anonymous classes. + + + + + Escapes the specified value, if necessary according to + RFC-4180. + + + The value which will be escaped according to + RFC-4180 + + + The value will first be trimmed of its optional white-space characters, according to + RFC-7230 + + the escaped value if necessary, or the value unchanged + + + + Time utility class. + + + + + Compare two timespan objects + + first timespan object + two timespan object + + + + Gets the system time. + + The system time. + +
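The attribute machinery documented above (AttributeKey, the attribute itself, and the attribute map) is typically used to hang per-channel state off a channel. Below is a minimal sketch under the assumption that the usual DotNetty.Common.Utilities and DotNetty.Transport.Channels names apply; the "sessionId" key and helper classes are made up for illustration.

    using DotNetty.Common.Utilities;
    using DotNetty.Transport.Channels;

    static class SessionKeys
    {
        // ValueOf returns a pooled singleton: the same name always yields the same key.
        public static readonly AttributeKey<string> SessionId = AttributeKey<string>.ValueOf("sessionId");
    }

    static class SessionTagging
    {
        public static void Tag(IChannel channel, string id)
        {
            // GetAttribute never returns null; the attribute may simply hold no value yet.
            IAttribute<string> attr = channel.GetAttribute(SessionKeys.SessionId);
            attr.SetIfAbsent(id); // only the first writer wins
        }

        public static string Read(IChannel channel) =>
            channel.GetAttribute(SessionKeys.SessionId).Get(); // may be null if never set
    }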
+
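The Retain/Release helpers documented above matter most in a terminal inbound handler, which owns the messages it receives and must release them. A minimal sketch, assuming DotNetty's ChannelHandlerAdapter and ReferenceCountUtil as described here; the handler name is hypothetical.

    using DotNetty.Common.Utilities;
    using DotNetty.Transport.Channels;

    // Last handler in the pipeline: nothing downstream will release the message, so we must.
    public sealed class ConsumingHandler : ChannelHandlerAdapter
    {
        public override void ChannelRead(IChannelHandlerContext context, object message)
        {
            try
            {
                // ... use the message ...
            }
            finally
            {
                // No-op for messages that are not reference counted.
                ReferenceCountUtil.Release(message);
            }
        }
    }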
diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Common.xml.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Common.xml.meta new file mode 100644 index 0000000..dc4e997 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Common.xml.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: c546fed3667084e4aaa511b1588f700f +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll new file mode 100644 index 0000000..720f3f2 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll.mdb b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll.mdb new file mode 100644 index 0000000..e1e3e35 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll.mdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll.mdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll.mdb.meta new file mode 100644 index 0000000..2b06c55 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll.mdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 39f9688dda8becc44b61267395d5e74b +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll.meta new file mode 100644 index 0000000..1113d4c --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.dll.meta @@ -0,0 +1,33 @@ +fileFormatVersion: 2 +guid: a0061affda6d09a429fd2674e5c26df4 +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + Any: + second: + enabled: 1 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.pdb b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.pdb new file mode 100644 index 0000000..37a51dc Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.pdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.pdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.pdb.meta new file mode 100644 index 0000000..c3de5e6 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.pdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: e52783c20d361e14287f5ac05e933943 +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.xml b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.xml new file mode 100644 index 0000000..3172afe --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.xml @@ -0,0 +1,591 @@ + + + + DotNetty.Handlers + + + + The {@link FlowControlHandler} ensures that only one message per {@code read()} is sent downstream. + + Classes such as {@link ByteToMessageDecoder} or {@link MessageToByteEncoder} are free to emit as + many events as they like for any given input. A channel's auto reading configuration doesn't usually + apply in these scenarios. 
This is causing problems in downstream {@link ChannelHandler}s that would + like to hold subsequent events while they're processing one event. It's a common problem with the + {@code HttpObjectDecoder} that will very often fire a {@code HttpRequest} that is immediately followed + by a {@code LastHttpContent} event. + +
{@code
+             ChannelPipeline pipeline = ...;
+            
+             pipeline.addLast(new HttpServerCodec());
+             pipeline.addLast(new FlowControlHandler());
+            
+             pipeline.addLast(new MyExampleHandler());
+            
+             class MyExampleHandler extends ChannelInboundHandlerAdapter {
+               @Override
+               public void channelRead(IChannelHandlerContext ctx, Object msg) {
+                 if (msg instanceof HttpRequest) {
+                   ctx.channel().config().setAutoRead(false);
+            
+                   // The FlowControlHandler will hold any subsequent events that
+                   // were emitted by HttpObjectDecoder until auto reading is turned
+                   // back on or Channel#read() is being called.
+                 }
+               }
+             }
+             }
+ + @see ChannelConfig#setAutoRead(bool) +
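The same wiring in DotNetty-flavoured C# looks roughly like the sketch below. It assumes the handler is exposed as DotNetty.Handlers.Flow.FlowControlHandler, as in upstream DotNetty, and uses a hypothetical MyExampleHandler; the key point is that auto-read is switched off through Channel.Configuration.AutoRead.

    using DotNetty.Handlers.Flow;
    using DotNetty.Transport.Channels;

    public sealed class OneMessagePerReadInitializer : ChannelInitializer<IChannel>
    {
        protected override void InitChannel(IChannel channel)
        {
            IChannelPipeline pipeline = channel.Pipeline;
            pipeline.AddLast(new FlowControlHandler());
            pipeline.AddLast(new MyExampleHandler());
        }
    }

    public sealed class MyExampleHandler : ChannelHandlerAdapter
    {
        public override void ChannelRead(IChannelHandlerContext ctx, object msg)
        {
            // Turn auto-read off; FlowControlHandler now holds any events already emitted
            // upstream until auto-read is re-enabled or ctx.Read() is called explicitly.
            ctx.Channel.Configuration.AutoRead = false;
            // ... process msg, re-enable reading when ready ...
        }
    }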
+ + Determine if the underlying {@link Queue} is empty. This method exists for + testing, debugging and inspection purposes and it is not Thread safe! + + + Releases all messages and destroys the {@link Queue}. + + + Dequeues one or many (or none) messages depending on the channel's auto + reading state and returns the number of messages that were consumed from + the internal queue. + + The {@code minConsume} argument is used to force {@code dequeue()} into + consuming that number of messages regardless of the channel's auto + reading configuration. + + @see #read(ChannelHandlerContext) + @see #channelRead(ChannelHandlerContext, Object) + + + + A that logs all events using a logging framework. + By default, all events are logged at DEBUG level. + + + + + Creates a new instance whose logger name is the fully qualified class + name of the instance with hex dump enabled. + + + + + Creates a new instance whose logger name is the fully qualified class + name of the instance + + the log level + + + + Creates a new instance with the specified logger name and with hex dump + enabled + + the class type to generate the logger for + + + + Creates a new instance with the specified logger name. + + the class type to generate the logger for + the log level + + + + Creates a new instance with the specified logger name using the default log level. + + the name of the class to use for the logger + + + + Creates a new instance with the specified logger name. + + the name of the class to use for the logger + the log level + + + + Returns the that this handler uses to log + + + + + Formats an event and returns the formatted message + + the name of the event + + + + Formats an event and returns the formatted message. + + the name of the event + the argument of the event + + + + Formats an event and returns the formatted message. This method is currently only used for formatting + + + the name of the event + the first argument of the event + the second argument of the event + + + + Generates the default log message of the specified event whose argument is a . + + + + + Generates the default log message of the specified event whose argument is a . + + + + + Generates the default log message of the specified event whose argument is an arbitrary object. + + + + + An that represents the idle state of a . + + + + + No data was received for a while. + + + + + No data was sent for a while. + + + + + No data was either received or sent for a while. + + + + + A user event triggered by when a is idle. + + + + + Constructor for sub-classes. + + the which triggered the event. + true if its the first idle event for the . + + + + Returns the idle state. + + The state. + + + + Returns true if this was the first event for the + + true if first; otherwise, false. + + + + Triggers an when a has not performed + read, write, or both operation for a while. + + + +

+             Supported idle states:
+
+             Property        Meaning
+             readerIdleTime  an IdleStateEvent whose state is ReaderIdle will be triggered when no read was performed for the specified period of time. Specify 0 to disable.
+             writerIdleTime  an IdleStateEvent whose state is WriterIdle will be triggered when no write was performed for the specified period of time. Specify 0 to disable.
+             allIdleTime     an IdleStateEvent whose state is AllIdle will be triggered when neither read nor write was performed for the specified period of time. Specify 0 to disable.
+
+ + + + + + An example that sends a ping message when there is no outbound traffic + for 30 seconds. The connection is closed when there is no inbound traffic + for 60 seconds. + + + var bootstrap = new (); + + bootstrap.ChildHandler(new ActionChannelInitializer<ISocketChannel>(channel => + { + IChannelPipeline pipeline = channel.Pipeline; + + pipeline.AddLast("idleStateHandler", new (60, 30, 0); + pipeline.AddLast("myHandler", new MyHandler()); + } + + + Handler should handle the triggered by . + + + public class MyHandler : ChannelDuplexHandler + { + public override void UserEventTriggered( context, evt) + { + if(evt is ) + { + e = () evt; + if (e.State == .ReaderIdle) + { + ctx.close(); + } + else if(e.State == .WriterIdle) + { + ctx.writeAndFlush(new PingMessage()); + } + } + } + } + + + + + + +
+
+ + + Initializes a new instance firing s. + + + an whose state is + will be triggered when no read was performed for the specified + period of time. Specify 0 to disable. + + + an whose state is + will be triggered when no write was performed for the specified + period of time. Specify 0 to disable. + + + an whose state is + will be triggered when neither read nor write was performed for + the specified period of time. Specify 0 to disable. + + + + + + + + + + Initializes a new instance firing s. + + + whether or not the consumption of bytes should be taken into + consideration when assessing write idleness. The default is false. + + + an whose state is + will be triggered when no read was performed for the specified + period of time. Specify to disable. + + + an whose state is + will be triggered when no write was performed for the specified + period of time. Specify to disable. + + + an whose state is + will be triggered when neither read nor write was performed for + the specified period of time. Specify to disable. + + + + + Return the readerIdleTime that was given when instance this class in milliseconds. + + The reader idle time in millis. + + + + Return the writerIdleTime that was given when instance this class in milliseconds. + + The writer idle time in millis. + + + + Return the allIdleTime that was given when instance this class in milliseconds. + + The all idle time in millis. + + + + This method is visible for testing! + + + + + + This method is visible for testing! + + + + + + + + + + + Is called when an should be fired. This implementation calls + . + + Context. + Evt. + + + + Returns a . + + + + + + + + + + + + + + Returns true if and only if the + was constructed + with observeOutput enabled and there has been an observed change in the + between two consecutive calls of this method. + https://github.com/netty/netty/issues/6150 + + + + + + + + Raises a when no data was read within a certain + period of time. + +
+             The connection is closed when there is no inbound traffic
+             for 30 seconds.
+            
+             
+             
+             var bootstrap = new ();
+            
+             bootstrap.ChildHandler(new ActionChannelInitializer<ISocketChannel>(channel =>
+             {
+                 IChannelPipeline pipeline = channel.Pipeline;
+                 
+                 pipeline.AddLast("readTimeoutHandler", new (30);
+                 pipeline.AddLast("myHandler", new MyHandler());
+             } 
+             
+                        
+             
+             public class MyHandler : ChannelDuplexHandler 
+             {
+                 public override void ExceptionCaught( context,  exception)
+                 {
+                     if(exception is ) 
+                     {
+                         // do something
+                     }
+                     else
+                     {
+                         base.ExceptionCaught(context, exception);
+                     }
+                  }
+             }
+             
+             
+             
+ + + +
+
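Because the generated example above lost its type references when the XML was flattened, here is a compilable C# sketch of the same pattern. It assumes the usual DotNetty names ReadTimeoutHandler and ReadTimeoutException in DotNetty.Handlers.Timeout; the initializer and handler names are made up for illustration.

    using System;
    using DotNetty.Handlers.Timeout;
    using DotNetty.Transport.Channels;

    public sealed class ReadTimeoutInitializer : ChannelInitializer<IChannel>
    {
        protected override void InitChannel(IChannel channel)
        {
            IChannelPipeline pipeline = channel.Pipeline;
            // Raise a ReadTimeoutException when nothing is read for 30 seconds.
            pipeline.AddLast("readTimeoutHandler", new ReadTimeoutHandler(30));
            pipeline.AddLast("myHandler", new MyTimeoutAwareHandler());
        }
    }

    public sealed class MyTimeoutAwareHandler : ChannelDuplexHandler
    {
        public override void ExceptionCaught(IChannelHandlerContext context, Exception exception)
        {
            if (exception is ReadTimeoutException)
            {
                // The peer went quiet; react here (log, clean up, close).
                context.CloseAsync();
            }
            else
            {
                base.ExceptionCaught(context, exception);
            }
        }
    }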
+ + + Initializes a new instance of the class. + + Timeout in seconds. + + + + Initializes a new instance of the class. + + Timeout. + + + + Is called when a read timeout was detected. + + Context. + + + + Raises a when a write operation cannot finish in a certain period of time. + + + + + The connection is closed when a write operation cannot finish in 30 seconds. + + + var bootstrap = new (); + + bootstrap.ChildHandler(new ActionChannelInitializer<ISocketChannel>(channel => + { + IChannelPipeline pipeline = channel.Pipeline; + + pipeline.AddLast("writeTimeoutHandler", new (30); + pipeline.AddLast("myHandler", new MyHandler()); + } + + + + public class MyHandler : ChannelDuplexHandler + { + public override void ExceptionCaught( context, exception) + { + if(exception is ) + { + // do somethind + } + else + { + base.ExceptionCaught(context, cause); + } + } + } + + + + + + + + + + + A doubly-linked list to track all WriteTimeoutTasks. + + + + + Initializes a new instance of the class. + + Timeout in seconds. + + + + Initializes a new instance of the class. + + Timeout. + + + + Is called when a write timeout was detected + + Context. + + + + Special exception which will get thrown if a packet is + received that not looks like a TLS/SSL record. A user can check for + this and so detect if one peer tries to + use secure and the other plain connection. + + + + Unwraps inbound SSL records. + + + + Creates a new event that indicates a successful handshake. + + + + + Creates a new event that indicates an unsuccessful handshake. + Use {@link #SUCCESS} to indicate a successful handshake. + + + + + Return {@code true} if the handshake was successful + + + + + Return the {@link Throwable} if {@link #isSuccess()} returns {@code false} + and so the handshake failed. + + + + Utilities for TLS packets. + + + change cipher spec + + + alert + + + handshake + + + application data + + + the length of the ssl record header (in bytes) + + + + Return how much bytes can be read out of the encrypted data. Be aware that this method will not increase + the readerIndex of the given . + + + The to read from. Be aware that it must have at least + bytes to read, + otherwise it will throw an . + + Offset to record start. + + The length of the encrypted packet that is included in the buffer. This will + return -1 if the given is not encrypted at all. + + +
+
diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.xml.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.xml.meta new file mode 100644 index 0000000..9e0bfdd --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Handlers.xml.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 89c10de1e938af64ca52675754efe833 +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll new file mode 100644 index 0000000..29692d7 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll.mdb b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll.mdb new file mode 100644 index 0000000..841be4e Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll.mdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll.mdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll.mdb.meta new file mode 100644 index 0000000..be095f3 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll.mdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: ecf53dc58e4338e4bb4a383e7499c042 +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll.meta new file mode 100644 index 0000000..374256e --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.dll.meta @@ -0,0 +1,33 @@ +fileFormatVersion: 2 +guid: 2cba5dfccbc5e7c45bf31c4e226fc3aa +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + Any: + second: + enabled: 1 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Transport.pdb b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.pdb new file mode 100644 index 0000000..544c837 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.pdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Transport.pdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.pdb.meta new file mode 100644 index 0000000..be226ca --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.pdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 92f1af146f5fa4d41b064e7023dbea80 +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Transport.xml b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.xml new file mode 100644 index 0000000..2a82cbd --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.xml @@ -0,0 +1,2464 @@ + + + + DotNetty.Transport + + + + + This is a helper class that makes it easy to bootstrap an . It supports method- + chaining to provide an easy way to configure the . + + When not used in a context, the methods + are useful for connectionless transports such as datagram (UDP). + + + + + Specifies the which will handle events for the being built. + + The which is used to handle all the events for the to-be-created . + The instance. 
+ + + + Specifies the of which will be created. + + The which is used to create instances from. + The instance. + + + + Assigns the which is used to bind the local "end" to. + + The instance to bind the local "end" to. + The instance. + + + + Assigns the local which is used to bind the local "end" to. + This overload binds to a for any IP address on the local machine, given a specific port. + + The port to bind the local "end" to. + The instance. + + + + Assigns the local which is used to bind the local "end" to. + This overload binds to a for a given hostname and port. + + The hostname to bind the local "end" to. + The port to bind the local "end" to. + The instance. + + + + Assigns the local which is used to bind the local "end" to. + This overload binds to a for a given and port. + + The to bind the local "end" to. + The port to bind the local "end" to. + The instance. + + + + Allows the specification of a which is used for the + instances once they get created. Use a value of null to remove + a previously set . + + The to configure. + The value to set the given option. + + + + Allows specification of an initial attribute of the newly created . If the value is + null, the attribute of the specified key is removed. + + + + + Validates all the parameters. Sub-classes may override this, but should call the super method in that case. + + + + + Returns a deep clone of this bootstrap which has the identical configuration. This method is useful when making + multiple s with similar settings. Please note that this method does not clone the + deeply but shallowly, making the group a shared resource. + + + + + Creates a new and registers it with an . + + + + + Creates a new and binds it to the endpoint specified via the methods. + + The bound . + + + + Creates a new and binds it. + This overload binds to a for any IP address on the local machine, given a specific port. + + The port to bind the local "end" to. + The bound . + + + + Creates a new and binds it. + This overload binds to a for a given hostname and port. + + The hostname to bind the local "end" to. + The port to bind the local "end" to. + The bound . + + + + Creates a new and binds it. + This overload binds to a for a given and port. + + The to bind the local "end" to. + The port to bind the local "end" to. + The bound . + + + + Creates a new and binds it. + + The instance to bind the local "end" to. + The bound . + + + + Specifies the to use for serving the requests. + + The to use for serving requests. + The instance. + + + + Returns the configured or null if none is configured yet. + + + + + A that makes it easy to bootstrap an to use for clients. + + The methods are useful + in combination with connectionless transports such as datagram (UDP). For regular TCP connections, + please use the provided methods. + + + + + Sets the which will resolve the address of the unresolved named address. + + The which will resolve the address of the unresolved named address. + The instance. + + + + Assigns the remote to connect to once the method is called. + + The remote to connect to. + The instance. + + + + Assigns the remote to connect to once the method is called. + + The hostname of the endpoint to connect to. + The port at the remote host to connect to. + The instance. + + + + Assigns the remote to connect to once the method is called. + + The of the endpoint to connect to. + The port at the remote host to connect to. + The instance. + + + + Connects an to the remote peer. + + The . + + + + Connects an to the remote peer. 
+ + The hostname of the endpoint to connect to. + The port at the remote host to connect to. + The . + + + + Connects an to the remote peer. + + The of the endpoint to connect to. + The port at the remote host to connect to. + The . + + + + Connects an to the remote peer. + + The remote to connect to. + The . + + + + Connects an to the remote peer. + + The remote to connect to. + The local to connect to. + The . + + + + Performs DNS resolution for the remote endpoint and connects to it. + + The remote to connect to. + The local to connect the remote to. + The . + + + + Returns a deep clone of this bootstrap which has the identical configuration except that it uses + the given . This method is useful when making multiple s with similar + settings. + + + + + A sub-class which allows easy bootstrapping of . + + + + + Specifies the which is used for the parent (acceptor) and the child (client). + + + + + Sets the for the parent (acceptor) and the child (client). These + 's are used to handle all the events and IO for + and 's. + + + + + Allows specification of a which is used for the + instances once they get created (after the acceptor accepted the ). Use a + value of null to remove a previously set . + + + + + Sets the specific with the given value on every child . + If the value is null, the is removed. + + + + + Sets the which is used to serve the request for the 's. + + + + + Returns the configured which will be used for the child channels or null + if none is configured yet. + + + + Cache for the string representation of this channel + + + + Creates a new instance. + + The parent of this channel. Pass null if there's no parent. + + + + Creates a new instance. + + The parent of this channel. Pass null if there's no parent. + An for the new channel. + + + + Resets the stored . + + + + + Returns a new instance. Subclasses may override this method to assign custom + s to s that use the constructor. + + A new instance. + + + Returns a new pipeline instance. + + + + Create a new instance which will be used for the life-time of the + + + + + + Returns the ID of this channel. + + + + + Returns true if and only if the specified object is identical + with this channel (i.e. this == o). + + + + + Returns the string representation of this channel. The returned string contains a hex dump of the + , the , and the of this + channel for easier identification. + + + + + implementation which sub-classes must extend and use. + + + + true if the channel has never been registered, false otherwise /// + + + + This method must NEVER be called directly, but be executed as an + extra task with a clean call stack instead. The reason for this + is that this method calls + directly, which might lead to an unfortunate nesting of independent inbound/outbound + events. See the comments input for more details. + + + + + Prepares to close the . If this method returns an , the + caller must call the method with a task that calls + on the returned . If this method returns null, + must be called from the caller thread. (i.e. ) + + + + + Checks whether a given is compatible with the . + + The to check compatibility. + + true if the given is compatible with this + instance, otherwise false. + + + + + Is called after the is registered with its as part of the + register process. Sub-classes may override this method. + + + + + Binds the to the . + + The to bind. + + + + Disconnects this from its remote peer. + + + + + Closes the . + + + + + Deregisters the from its . Sub-classes may override this + method. 
+ + + + + ScheduleAsync a read operation. + + + + + Flush the content of the given buffer to the remote peer. + + + + + Invoked when a new message is added to a of this + , so that the implementation converts the message to + another. (e.g. heap buffer -> direct buffer). + + The message to be filtered. + The filtered message. + + + Neither nor was called. + + + was called. + + + was called. + + + + Makes best possible effort to detect if was + called + yet. If not return false and if called or could not detect return true. + If this method returns true we will not invoke the but just forward the + event. + This is needed as may already put the in the + linked-list + but not called + + + + + A skeletal server-side implementation. A server-side does not + allow the following operations: , + , , + . + + + + + Creates a new instance. + + + + + The that automatically increases and + decreases the predicted buffer size on feed back. +

+ It gradually increases the expected number of readable bytes if the previous + read fully filled the allocated buffer. It gradually decreases the expected + number of readable bytes if the read operation was not able to fill a certain + amount of the allocated buffer two times consecutively. Otherwise, it keeps + returning the same prediction. +

+
+ + + Creates a new predictor with the default parameters. With the default + parameters, the expected buffer size starts from 1024, does not + go down below 64, and does not go up above 65536. + + + + Creates a new predictor with the specified parameters. + the inclusive lower bound of the expected buffer size + the initial buffer size when no feed back was received + the inclusive upper bound of the expected buffer size + + + + that works as a wrapper for another providing affinity on call. + + + + + + + + Creates a new instance of . + + serving as an actual provider of s. + + + + If running in a context of an existing , this is returned. + Otherwise, is retrieved from underlying . + + + + + + + + A queue of write operations which are pending for later execution. It also updates the + writability of the associated , so that + the pending write operations are also considered to determine the writability. + + + + Returns true if there are no pending write operations left in this queue. + + + Returns the number of pending write operations. + + + Add the given msg and returns for completion of processing msg. + + + + Remove all pending write operation and fail them with the given . The messages will be + released + via . + + + + + Remove a pending write operation and fail it with the given . The message will be released + via + . + + + + + Remove all pending write operation and performs them via + . + + + if something was written and null if the + is empty. + + + + + Removes a pending write operation and performs it via + . + + + if something was written and null if the + is empty. + + + + + Removes a pending write operation and release it's message via . + + of the pending write or null if the queue is empty. + + + + Return the current message or null if empty. + + + + Holds all meta-data and construct the linked-list structure. + + + + It is a good starting point if your implementation needs to intercept operations and also + state updates. + + + + + A special which offers an easy way to initialize a once it was + registered to its . + + Implementations are most often used in the context of + and to setup the of a . + + Be aware that this class is marked as Sharable (via ) and so the implementation must be safe to be re-used. + + + + public class MyChannelInitializer extends { + public void InitChannel( channel) { + channel.Pipeline().AddLast("myHandler", new MyHandler()); + } + } + bootstrap = ...; + ... + bootstrap.childHandler(new MyChannelInitializer()); + ... + + + A sub-type of . + + + + This method will be called once the was registered. After the method returns this instance + will be removed from the of the . + + The which was registered. + + + Represents the properties of a implementation. + + + Create a new instance + + true if and only if the channel has the DisconnectAsync() operation + that allows a user to disconnect and then call + again, such as UDP/IP. + + + + Create a new instance + + true if and only if the channel has the DisconnectAsync operation + that allows a user to disconnect and then call + again, such as UDP/IP. + + + If a is in use, then this value will be + set for . Must be > 0. + + + + + Returns true if and only if the channel has the DisconnectAsync() operation + that allows a user to disconnect and then call again, + such as UDP/IP. + + + + + If a is in use, then this is the default value for + . + + + + + Returns the of the specified name. + + The type of option being retrieved. + The name of the desired option. + The matching instance. 
+ + + + Returns the of the given pair: (, secondary name) + + The type of option being retrieved. + + A whose name will be used as the first part of the desired option's name. + + + A string representing the second part of the desired option's name. + + The matching instance. + + + + Checks whether a given exists. + + The name of the . + true if a exists for the given , otherwise false. + + + + Creates a new for the given . + + The type of option to create. + The name to associate with the new option. + Thrown if a for the given exists. + The new instance. + + + + Adds the given message to this . The given + will be notified once the message was written. + + The message to add to the buffer. + The size of the message. + The to notify once the message is written. + + + + Add a flush to this . This means all previous added messages are marked + as flushed and so you will be able to handle them. + + + + + Increments the number of pending bytes which will be written at some point. + This method is thread-safe! + + The number of bytes to increment the count by. + + + + Decrements the number of pending bytes which will be written at some point. + This method is thread-safe! + + The number of bytes to decrement the count by. + + + + Returns the current message to write, or null if nothing was flushed before and so is ready to be + written. + + + + + Notify the of the current message about writing progress. + + + + + Removes the current message, marks its as complete, and returns + true. If no flushed message exists at the time this method is called, it returns false to + signal that no more messages are ready to be handled. + + true if a message existed and was removed, otherwise false. + + + + Removes the current message, marks its as complete using the given + , and returns true. If no flushed message exists at the time this method is + called, it returns false to signal that no more messages are ready to be handled. + + The causing the message to be removed. + true if a message existed and was removed, otherwise false. + + + + Removes the fully written entries and updates the reader index of the partially written entry. + This operation assumes all messages in this buffer are instances. + + The number of bytes that have been written so far. + + + + Clears all ByteBuffer from the array so these can be GC'ed. + See https://github.com/netty/netty/issues/3837 + + + + + Returns a list of direct ArraySegment<byte>, if the currently pending messages are made of + instances only. will return the total number of + readable bytes of these buffers. + + Note that the returned array is reused and thus should not escape + . Refer to + for an example. + + + A list of ArraySegment<byte> buffers. + + + + Returns a list of direct ArraySegment<byte>, if the currently pending messages are made of + instances only. will return the total number of + readable bytes of these buffers. + + Note that the returned array is reused and thus should not escape + . Refer to + for an example. + + + The maximum amount of buffers that will be added to the return value. + A hint toward the maximum number of bytes to include as part of the return value. Note that this value maybe exceeded because we make a best effort to include at least 1 in the return value to ensure write progress is made. + A list of ArraySegment<byte> buffers. + + + + Returns the number of bytes that can be written out of the array that was + obtained via . This method MUST be called after + . 
+ + + + + Returns true if and only if the total number of pending bytes () + did not exceed the write watermark of the and no user-defined writability flag + () has been set to false. + + + + + Returns true if and only if the user-defined writability flag at the specified index is set to + true. + + The index to check for user-defined writability. + + true if the user-defined writability flag at the specified index is set to true. + + + + + Sets a user-defined writability flag at the specified index. + + The index where a writability flag should be set. + Whether to set the index as writable or not. + + + + Returns the number of flushed messages in this . + + + + + Returns true if there are flushed messages in this , otherwise + false. + + + + + Gets the number of bytes that can be written before returns false. + This quantity will always be non-negative. If is already false, then 0 is + returned. + + + The number of bytes that can be written before returns false. + + + + + Gets the number of bytes that must be drained from the underlying buffer before + returns true. This quantity will always be non-negative. If is already + true, then 0 is returned. + + + The number of bytes that can be written before returns true. + + + + + Calls for each flushed message in this + until returns + false or there are no more flushed messages to process. + + + The intance to use to process each flushed message. + + + + + Will be called for each flushed message until it either there are no more flushed messages or this method returns false. + + The message to process. + true if the given message was successfully processed, otherwise false. + + + + Shared configuration for SocketAsyncChannel. Provides access to pre-configured resources like ByteBuf allocator and + IO buffer pools + + + + + This is the head of a linked list that is processed by and so + process all the pending . We only keep the head because it is expected that + the list is used infrequently and its size is small. Thus full iterations to do insertions is assumed to be + a good compromised to saving memory and tail management complexity. + + + + + Set to true once the is registered. Once set to true, the + value will never change. + + + + + Returns the string representation of this pipeline. + + + + + Removes all handlers from the pipeline one by one from tail (exclusive) to head (exclusive) to trigger + . Note that we traverse up the pipeline + before traversing down so that the handlers are removed after all events are + handled. + See: https://github.com/netty/netty/issues/3156 + + + + + Called once an hits the end of the without being + handled by the user in . + + + + + Called once a message hits the end of the without being handled by the user + in . This method is responsible + for calling on the given msg at some point. + + + + + Default implementation of which respects + + and also prevents overflow. + + + + Focuses on enforcing the maximum messages per read condition for . + + + Only is used. + + + + Returns the default implementation, which returns 0 for unknown messages. + + + + + Creates a new instance. + + The size which is returned for unknown messages. + + + + Create a new instance with an empty pipeline. + + + + + Create a new instance with an empty pipeline with the specified . + + The of this channel. + + + + Create a new instance with the pipeline initialized with the specified handlers. + + + The s that will be added to the + + + + Create a new instance with the pipeline initialized with the specified handlers. 
+ The of this channel. + + false if this will delegate + to , true otherwise. + + + The s that will be added to the + + + + + Returns the which holds all of the s that + were received by this . + + + + + Returns the which holds all of the s that + were written by this . + + + + + Return received data from this . + + + + + Read data from the outbound. This may return null if nothing is readable. + + + + + Run all tasks (which also includes scheduled tasks) that are pending in the + for this . + + + + + Run all pending scheduled tasks in the for this . + + + The when the next scheduled task is ready to run. If no other task is + scheduled then it will return . + + + + + Write messages to the inbound of this + + The messages to be written. + true if the write operation did add something to the inbound buffer + + + + Write messages to the outbound of this . + + The messages to be written. + true if the write operation did add something to the inbound buffer + + + + Mark this as finished. Any further try to write data to it will fail. + + bufferReadable returns true + + + + Marks this as finished and releases all pending message in the inbound and outbound + buffer. Any futher try to write data to it will fail. + + true if any of the used buffers has something left to read, otherwise false. + + + + Marks this as finished. Any futher attempt to write data to it will fail. + + If true, all pending messages in the inbound and outbound buffer are released. + true if any of the used buffers has something left to read, otherwise false. + + + + Releases all buffered inbound messages. + + true if any were in the inbound buffer, otherwise false. + + + + Releases all buffered outbound messages. + + true if any were in the outbound buffer, otherwise false. + + + + Check to see if there was any and rethrow if so. + + + + + Ensure the is open and if not throw an exception. + + + + + A dummy implementation + + + + + Create a new instance with an empty pipeline. + + + + + Create a new instance with an empty pipeline with the specified . + + The of this channel. + + + + Create a new instance with the pipeline initialized with the specified handlers. + + + The s that will be added to the + + + + Create a new instance with the pipeline initialized with the specified handlers. + The of this channel. + + false if this will delegate + to , true otherwise. + + + The s that will be added to the + + + + + Returns the which holds all of the s that + were received by this . + + + + + Returns the which holds all of the s that + were written by this . + + + + + Return received data from this . + + + + + Read data from the outbound. This may return null if nothing is readable. + + + + + Write messages to the inbound of this + + The messages to be written. + true if the write operation did add something to the inbound buffer + + + + Write messages to the outbound of this . + + The messages to be written. + true if the write operation did add something to the inbound buffer + + + + Mark this as finished. Any further try to write data to it will fail. + + bufferReadable returns true + + + + Marks this as finished and releases all pending message in the inbound and outbound + buffer. Any futher try to write data to it will fail. + + true if any of the used buffers has something left to read, otherwise false. + + + + Marks this as finished. Any futher attempt to write data to it will fail. + + If true, all pending messages in the inbound and outbound buffer are released. 
+ true if any of the used buffers has something left to read, otherwise false. + + + + Releases all buffered inbound messages. + + true if any were in the inbound buffer, otherwise false. + + + + Releases all buffered outbound messages. + + true if any were in the outbound buffer, otherwise false. + + + + Check to see if there was any and rethrow if so. + + + + + Ensure the is open and if not throw an exception. + + + + + The that always yields the same buffer + size prediction. This predictor ignores the feedback from the I/O thread. + + + + + Creates a new predictor that always returns the same prediction of + the specified buffer size. + + + + + Returns the name of this group. A group name is purely for helping + you to distinguish one group from others. + + + + + Return the of the which describe the nature of the + . + + + + + The of the was registered with its + . + + + + + The of the was unregistered from its + . + + + + + Gets called once the writable state of a changed. You can check the state with + . + + + + + Called once a bind operation is made. + + + The for which the bind operation is made. + + The to which it should bind. + An await-able task. + + + + Called once a connect operation is made. + + + The for which the connect operation is made. + + The to which it should connect. + The which is used as source on connect. + An await-able task. + + + + Called once a disconnect operation is made. + + + The for which the disconnect operation is made. + + An await-able task. + + + + Returns the which is used to execute an arbitrary task. + + + + + The unique name of the . + + + The name was used when the was added to the . + This name can also be used to access the registered from the + . + + + + + A was registered to its . This will result in having the + method called of the next + contained in the of the . + + The current . + + + + A was unregistered from its . This will result in having the + method called of the next + contained in the of the . + + The current . + + + + Request to bind to the given . + + This will result in having the method called of the next + contained in the of the + . + + + The to bind to. + An await-able task. + + + + Request to connect to the given . + + This will result in having the method called of the next + contained in the of the + . + + + The to connect to. + An await-able task. + + + + Request to connect to the given while also binding to the localAddress. + + This will result in having the method called of the next + contained in the of the + . + + + The to connect to. + The to bind to. + An await-able task. + + + + Request to disconnect from the remote peer. + + This will result in having the method called of the next + contained in the of the + . + + + An await-able task. + + + + Request to deregister from the previous assigned . + + This will result in having the method called of the next + contained in the of the + . + + + An await-able task. + + + + A list of s which handles or intercepts inbound events and outbound operations of + a . implements an advanced form of the + Intercepting Filter pattern + to give a user full control over how an event is handled and how the s in a + pipeline interact with each other. + Creation of a pipeline + Each channel has its own pipeline and it is created automatically when a new channel is created. + How an event flows in a pipeline + + The following diagram describes how I/O events are processed by s in a + typically. 
An I/O event is handled by a and is + forwarded by the which handled the event to the + which is placed right next to it. A can also trigger an arbitrary I/O event if + necessary. To forward or trigger an event, a calls the event propagation methods + defined in , such as + and . + + +
+                    I/O Request
+                    via  or
+                    {@link ChannelHandlerContext} 
+                    |
+                    +---------------------------------------------------+---------------+
+                    |                           ChannelPipeline         |               |
+                    |                                                  \|/              |
+                    |    +----------------------------------------------+----------+    |
+                    |    |                   ChannelHandler  N                     |    |
+                    |    +----------+-----------------------------------+----------+    |
+                    |              /|\                                  |               |
+                    |               |                                  \|/              |
+                    |    +----------+-----------------------------------+----------+    |
+                    |    |                   ChannelHandler N-1                    |    |
+                    |    +----------+-----------------------------------+----------+    |
+                    |              /|\                                  .               |
+                    |               .                                   .               |
+                    | ChannelHandlerContext.fireIN_EVT() ChannelHandlerContext.OUT_EVT()|
+                    |          [method call]                      [method call]         |
+                    |               .                                   .               |
+                    |               .                                  \|/              |
+                    |    +----------+-----------------------------------+----------+    |
+                    |    |                   ChannelHandler  2                     |    |
+                    |    +----------+-----------------------------------+----------+    |
+                    |              /|\                                  |               |
+                    |               |                                  \|/              |
+                    |    +----------+-----------------------------------+----------+    |
+                    |    |                   ChannelHandler  1                     |    |
+                    |    +----------+-----------------------------------+----------+    |
+                    |              /|\                                  |               |
+                    +---------------+-----------------------------------+---------------+
+                    |                                  \|/
+                    +---------------+-----------------------------------+---------------+
+                    |               |                                   |               |
+                    |       [ Socket.read() ]                    [ Socket.write() ]     |
+                    |                                                                   |
+                    |  Netty Internal I/O Threads (Transport Implementation)            |
+                    +-------------------------------------------------------------------+
+                
+
+ + An inbound event is handled by the s in the bottom-up direction as shown on the + left side of the diagram. An inbound event is usually triggered by the I/O thread on the bottom of the diagram + so that the s are notified when the state of a changes + (e.g. newly established connections and closed connections) or the inbound data was read from a remote peer. If + an inbound event goes beyond the at the top of the diagram, it is discarded and + logged, depending on your loglevel. + + + An outbound event is handled by the s in the top-down direction as shown on the + right side of the diagram. An outbound event is usually triggered by your code that requests an outbound I/O + operation, such as a write request and a connection attempt. If an outbound event goes beyond the + at the bottom of the diagram, it is handled by an I/O thread associated with the + . The I/O thread often performs the actual output operation such as + . + + Forwarding an event to the next handler + + As explained briefly above, a has to invoke the event propagation methods in + to forward an event to its next handler. Those methods include: +
+             Inbound event propagation methods (e.g. FireChannelActive in the example below)
+             Outbound event propagation methods (e.g. CloseAsync in the example below)
+
+ + and the following example shows how the event propagation is usually done: + + public class MyInboundHandler : + { + public override void ChannelActive( ctx) + { + Console.WriteLine("Connected!"); + ctx.FireChannelActive(); + } + } + + public class MyOutboundHandler : + { + public override async Task CloseAsync( ctx) + { + Console.WriteLine("Closing..."); + await ctx.CloseAsync(); + } + } + + + Building a pipeline + + A user is supposed to have one or more s in a pipeline to receive I/O events + (e.g. read) and to request I/O operations (e.g. write and close). For example, a typical server will have the + following handlers in each channel's pipeline, but your mileage may vary depending on the complexity and + characteristics of the protocol and business logic: +
+             1. Protocol Decoder - translates binary data (e.g. a byte buffer) into a Java object.
+             2. Protocol Encoder - translates a Java object into binary data.
+             3. Business Logic Handler - performs the actual business logic (e.g. database access).
+
+ + and it could be represented as shown in the following example: + + static readonly group = new (); + ... + pipeline = ch.Pipeline; + pipeline.AddLast("decoder", new MyProtocolDecoder()); + pipeline.AddLast("encoder", new MyProtocolEncoder()); + + // Tell the pipeline to run MyBusinessLogicHandler's event handler methods + // in a different thread than an I/O thread so that the I/O thread is not blocked by + // a time-consuming task. + // If your business logic is fully asynchronous or finished very quickly, you don't + // need to specify a group. + pipeline.AddLast(group, "handler", new MyBusinessLogicHandler()); + + + Thread safety + + An can be added or removed at any time because an + is thread safe. For example, you can insert an encryption handler when sensitive information is about to be + exchanged, and remove it after the exchange. + +
+
+ + + Inserts an at the first position of this pipeline. + + + The name of the handler to insert first. Pass null to let the name auto-generated. + + The to insert first. + The . + + Thrown if an entry with the same already exists. + + Thrown if the specified handler is null. + + + + Inserts a at the first position of this pipeline. + + + The which invokes the 's event handler methods. + + + The name of the handler to insert first. Pass null to let the name be auto-generated. + + The to insert first. + This . + + Thrown if an entry with the same already exists. + + Thrown if the specified handler is null. + + + + Appends an at the last position of this pipeline. + + + The name of the handler to append. Pass null to let the name be auto-generated. + + The to append. + This . + + Thrown if an entry with the same already exists. + + Thrown if the specified handler is null. + + + + Appends a at the last position of this pipeline. + + + The which invokes the 's event handler methods. + + + The name of the handler to append. Pass null to let the name be auto-generated. + + The to append. + This . + + Thrown if an entry with the same already exists. + + Thrown if the specified handler is null. + + + + Inserts a before an existing handler of this pipeline. + + The name of the existing handler. + + The name of the new handler being appended. Pass null to let the name be auto-generated. + + The to append. + This . + + Thrown if an entry with the same already exists, or if no match was found for the + given . + + Thrown if the specified handler is null. + + + + Inserts a before an existing handler of this pipeline. + + + The which invokes the 's event handler methods. + + The name of the existing handler. + + The name of the new handler being appended. Pass null to let the name be auto-generated. + + The to append. + This . + + Thrown if an entry with the same already exists, or if no match was found for the + given . + + Thrown if the specified handler is null. + + + + Inserts a after an existing handler of this pipeline. + + The name of the existing handler. + + The name of the new handler being appended. Pass null to let the name be auto-generated. + + The handler to insert after. + This . + + Thrown if an entry with the same already exists, or if no match was found for the + given . + + Thrown if the specified handler is null. + + + + Inserts a after an existing handler of this pipeline. + + + The which invokes the 's event handler methods. + + The name of the existing handler. + + The name of the new handler being appended. Pass null to let the name be auto-generated. + + The handler to insert after. + This . + + Thrown if an entry with the same already exists, or if no match was found for the + given . + + Thrown if the specified handler is null. + + + + Inserts multiple s at the first position of this pipeline. + + The s to insert. + This . + + + + Inserts multiple s at the first position of this pipeline. + + + The which invokes the ' event handler methods. + + The s to insert. + This . + + + + Inserts multiple s at the last position of this pipeline. + + The s to insert. + This . + + + + Inserts multiple s at the last position of this pipeline. + + + The which invokes the ' event handler methods. + + The s to insert. + This . + + + + Removes the specified from this pipeline. + + The to remove. + This . + Thrown if the specified handler was not found. + + + + Removes the with the specified name from this pipeline. + + The name under which the was stored. + The removed . 
+ + Thrown if there's no such handler with the specified name in this pipeline. + + + + + Removes the of the specified type from this pipeline. + + The type of handler to remove. + The removed . + Thrown if there's no handler of the specified type in this pipeline. + + + + Removes the first in this pipeline. + + The removed . + Thrown if this pipeline is empty. + + + + Removes the last in this pipeline. + + The removed . + Thrown if this pipeline is empty. + + + + Replaces the specified with a new handler in this pipeline. + + The to be replaced. + + The name of the new handler being inserted. Pass null to let the name be auto-generated. + + The new to be inserted. + This . + + Thrown if an entry with the same already exists, or if the + was not found. + + + + + Replaces the of the specified name with a new handler in this pipeline. + + The name of the to be replaced. + + The name of the new handler being inserted. Pass null to let the name be auto-generated. + + The new to be inserted. + The that was replaced. + + Thrown if an entry with the same already exists, or if no match was found for + the given . + + + + + Replaces the of the specified type with a new handler in this pipeline. + + The type of the handler to be removed. + + The name of the new handler being inserted. Pass null to let the name be auto-generated. + + The new to be inserted. + The that was replaced. + + Thrown if an entry with the same already exists, or if no match was found for + the given type. + + + + + Returns the first in this pipeline. + + The first handler in the pipeline, or null if the pipeline is empty. + + + + Returns the context of the first in this pipeline. + + + The context of the first handler in the pipeline, or null if the pipeline is empty. + + + + + Returns the last in this pipeline. + + The last handler in the pipeline, or null if the pipeline is empty. + + + + Returns the context of the last in this pipeline. + + + The context of the last handler in the pipeline, or null if the pipeline is empty. + + + + + Returns the with the specified name in this pipeline. + + The name of the desired . + + The handler with the specified name, or null if there's no such handler in this pipeline. + + + + + Returns the of the specified type in this pipeline. + + The type of handler to retrieve. + + The handler with the specified type, or null if there's no such handler in this pipeline. + + + + + Returns the context object of the specified in this pipeline. + + The whose context should be retrieved. + + The context object of the specified handler, or null if there's no such handler in this pipeline. + + + + + Returns the context object of the with the specified name in this pipeline. + + The name of the whose context should be retrieved. + + The context object of the handler with the specified name, or null if there's no such handler in + this pipeline. + + + + + Returns the context object of the of the specified type in this pipeline. + + The type of whose context should be retrieved. + + The context object of the handler with the specified type, or null if there's no such handler in + this pipeline. + + + + + Returns the that this pipeline is attached to. + Returns null if this pipeline is not attached to any channel yet. + + + + + An was registered to its . + This will result in having the method + called of the next contained in the of the + . + + This . + + + + An was unregistered from its . + This will result in having the method + called of the next contained in the of the + . + + This . 
+ + + + An is active now, which means it is connected. + This will result in having the method + called of the next contained in the of the + . + + This . + + + + An is inactive now, which means it is closed. + This will result in having the method + called of the next contained in the of the + . + + This . + + + + An received an in one of its inbound operations. + This will result in having the method + called of the next contained in the of the + . + + The that was caught. + This . + + + + An received an user defined event. + This will result in having the method + called of the next contained in the of the + . + + The user-defined event that was triggered. + This . + + + + An received a message. + This will result in having the method + called of the next contained in the of the + . + + The message that was received. + This . + + + + An completed a message after reading it. + This will result in having the method + called of the next contained in the of the + . + + This . + + + + Triggers an event to the next + in the . + + This . + + + + Request to bind to the given . + + This will result in having the method called of the next + contained in the of the + . + + + + + + Request to connect to the given . + + This will result in having the method called of the next + contained in the of the + . + + + The remote to connect to. + An await-able task. + + + + Request to connect to the given . + + This will result in having the method called of the next + contained in the of the + . + + + The remote to connect to. + The local to bind. + An await-able task. + + + + Request to disconnect from the remote peer. + + This will result in having the method called of the next + contained in the of the + . + + + An await-able task. + + + + Request to close the . After it is closed it is not possible to reuse it again. + + This will result in having the method called of the next + contained in the of the + . + + + An await-able task. + + + + Request to deregister the bound this from the + previous assigned . + + This will result in having the method called of the next + contained in the of the + . + + + An await-able task. + + + + Request to Read data from the into the first inbound buffer, triggers an + event if data was read, and triggers a + event so the handler can decide whether to continue + reading. If there's a pending read operation already, this method does nothing. + + This will result in having the method called of the next + contained in the of the + . + + + This . + + + + Request to write a message via this . + This method will not request to actual flush, so be sure to call + once you want to request to flush all pending data to the actual transport. + + An await-able task. + + + + Request to flush all pending messages. + + This . + + + + Shortcut for calling both and . + + + + + specialized to handle I/O operations of assigned s. + + + + + Parent . + + + + + + specialized for handling s. + + + + + Returns list of owned event loops. + + + + + Returns one of owned event loops. + + + + + Register the for this event loop. + + The to register. + The register task. + + + + that limits the number of read operations that will be attempted when a read + operation + is attempted by the event loop. + + + + + Gets or sets the maximum number of messages to read per read loop. + If this value is greater than 1, an event loop might attempt to read multiple times to procure multiple messages. + + + + + Creates a new handle. The handle provides the actual operations. 
+ + + + + Calculates the size of the given message. + + The message for which the size should be calculated. + The size in bytes. The returned size must be >= 0 + + + + Allocates a new receive buffer whose capacity is probably large enough to read all inbound data and small enough + not to waste its space. + + + + + Creates a new handle. The handle provides the actual operations and keeps the internal information which is + required for predicting an optimal buffer capacity. + + + + + Creates a new receive buffer whose capacity is probably large enough to read all inbound data and small + enough not to waste its space. + + + + + Similar to except that it does not allocate anything but just tells the + capacity. + + + + + Reset any counters that have accumulated and recommend how many messages/bytes should be read for the next + read loop. +

+ This may be used to determine if the read operation should complete.

+ This is only ever a hint and may be ignored by the implementation. +
+ The channel configuration which may impact this object's behavior. +
+ + Increment the number of messages that have been read for the current read loop. + The amount to increment by. + + + + Get or set the bytes that have been read for the last read operation. + This may be used to increment the number of bytes that have been read. + + + Returned value may be negative if an read error + occurs. If a negative value is seen it is expected to be return on the next set to + . A negative value will signal a termination condition enforced externally + to this class and is not required to be enforced in . + + + + Get or set how many bytes the read operation will (or did) attempt to read. + + + Determine if the current read loop should should continue. + true if the read loop should continue reading. false if the read loop is complete. + + + Signals read completion. + + + + A that accepts an incoming connection attempt and creates its child + s by accepting them. is a good example. + + + + + A for the local transport. + + + + + A for the local transport which allows in VM communication. + + + + + A factory method for s. Users may override it to create custom instances of s. + + An existing that will act as a peer for the new channel. + The newly created instance. + + + + backed by a set of instances. + + + + + + + + + + + + + Creates a new instance of . + + + Creates a new instance of . + + + Creates a new instance of . + + + Creates a new instance of . + + + + + + + + + + + + + A queue of write operations which are pending for later execution. It also updates the writability of the + associated (), so that the pending write operations are + also considered to determine the writability. + + + + + Returns true if there are no pending write operations left in this queue. + + + + + Returns the number of pending write operations. + + + + + Adds the given message to this . + + The message to add to the . + An await-able task. + + + + Removes all pending write operations, and fail them with the given . The messages + will be released via . + + The to fail with. + + + + Remove a pending write operation and fail it with the given . The message will be + released via . + + The to fail with. + + + + Removes all pending write operation and performs them via + + An await-able task. + + + + Removes a pending write operation and performs it via . + + An await-able task. + + + + Removes a pending write operation and releases it's message via + . + + + The of the pending write, or null if the queue is empty. + + + + + Return the current message, or null if the queue is empty. + + + + + Holds all meta-data and constructs the linked-list structure. + + + + + implementation based on . + + + + Creates a new instance of . + + + Creates a new instance of . + + + Creates a new instance of . + + + Creates a new instance of . + + + Creates a new instance of . + + + Creates a new instance of . + + + Creates a new instance of . + + + Creates a new instance of . + + + + + + + + + + base class for s that operate on bytes. + + + + Create a new instance + the parent by which this instance was created. May be null + the underlying on which it operates + + + + Reads bytes into the given and returns the number of bytes that were read. + + The to read bytes into. + The number of bytes that were read into the buffer. + + + + Writes bytes from the given to the underlying . + + The from which the bytes should be written. + The number of bytes that were written from the buffer. + + + + Set read pending to false. 
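The inbound-event entries above (channel read, read complete, exception caught) and the write/flush requests are easier to follow with a concrete handler. A minimal sketch, assuming the usual DotNetty ChannelHandlerAdapter overrides; EchoHandler is a hypothetical name introduced here for illustration.

    using System;
    using DotNetty.Transport.Channels;

    // Illustrates the inbound events documented above: ChannelRead fires once per
    // received message, ChannelReadComplete once the current read loop ends, and
    // ExceptionCaught when an inbound operation throws.
    public class EchoHandler : ChannelHandlerAdapter
    {
        public override void ChannelRead(IChannelHandlerContext context, object message)
        {
            // WriteAsync only queues the message; Flush pushes pending writes to the transport.
            context.WriteAsync(message);
        }

        public override void ChannelReadComplete(IChannelHandlerContext context) => context.Flush();

        public override void ExceptionCaught(IChannelHandlerContext context, Exception exception)
        {
            Console.WriteLine($"Closing channel after error: {exception}");
            context.CloseAsync();
        }
    }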
+ + + + state before modification + + + PORT NOTE: matches behavior of NioEventLoop.processSelectedKey + + + + Finish connect + + + + + Read from underlying {@link SelectableChannel} + + + + + Connect to the remote peer + + + + + Finish the connect + + + + + base class for s that operate on messages. + + + + + Creates a new instance. + + The parent . Pass null if there's no parent. + The used by the for communication. + + + + Returns true if we should continue the write loop on a write error. + + + + + Reads messages into the given list and returns the amount which was read. + + The list into which message objects should be inserted. + The number of messages which were read. + + + + Writes a message to the underlying . + + The message to be written. + The destination channel buffer for the message. + true if the message was successfully written, otherwise false. + + + + Special event which will be fired and passed to the + methods once the input of an was shutdown and the + property returns true. + + + + + Singleton instance to use. + + + + + The default implementation. + + + + + Creates a new instance. + + + + + The default implementation. + + + + + A TCP/IP which accepts incoming TCP/IP connections. + + + + + A implementation which uses Socket-based implementation to accept new + connections. + + + + + Create a new instance + + + + + Create a new instance + + + + + Create a new instance using the given . + + + + + which uses Socket-based implementation. + + + + Create a new instance + + + Create a new instance + + + Create a new instance using the given . + + + Create a new instance + + the which created this instance or null if it was created by the + user + + the which will be used + + + + Marks the specified as success. If the + is done already, logs a message. + + The to complete. + The to use to log a failure message. + + + + Marks the specified as failure. If the + is done already, log a message. + + The to complete. + The to fail the with. + The to use to log a failure message. + +
+
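Taken together, the channel, pipeline, event-loop and socket-channel entries above describe the building blocks of a server. A minimal sketch of how they fit together, assuming the stock DotNetty bootstrap API and reusing the hypothetical EchoHandler from the earlier sketch; this is not part of the vendored package, only an illustration.

    using System;
    using System.Threading.Tasks;
    using DotNetty.Transport.Bootstrapping;
    using DotNetty.Transport.Channels;
    using DotNetty.Transport.Channels.Sockets;

    public static class ServerSketch
    {
        public static async Task RunAsync(int port)
        {
            var bossGroup = new MultithreadEventLoopGroup(1);   // accepts incoming connections
            var workerGroup = new MultithreadEventLoopGroup();  // handles I/O of accepted child channels
            try
            {
                var bootstrap = new ServerBootstrap()
                    .Group(bossGroup, workerGroup)
                    .Channel<TcpServerSocketChannel>()
                    .ChildHandler(new ActionChannelInitializer<ISocketChannel>(ch =>
                        // every accepted child channel gets its own pipeline
                        ch.Pipeline.AddLast("echo", new EchoHandler())));

                IChannel listener = await bootstrap.BindAsync(port);
                Console.ReadLine();              // keep the server alive until Enter is pressed
                await listener.CloseAsync();
            }
            finally
            {
                await Task.WhenAll(
                    bossGroup.ShutdownGracefullyAsync(),
                    workerGroup.ShutdownGracefullyAsync());
            }
        }
    }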
diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Transport.xml.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.xml.meta new file mode 100644 index 0000000..fa202eb --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Transport.xml.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 312af1fcd31a2ef4a8ef4ed76e440da1 +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll new file mode 100644 index 0000000..f41e61f Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll.mdb b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll.mdb new file mode 100644 index 0000000..dcdb50a Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll.mdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll.mdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll.mdb.meta new file mode 100644 index 0000000..82d5aac --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll.mdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: cdb39fee38ded5b408550015cdae617b +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll.meta new file mode 100644 index 0000000..fe4f3fa --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.dll.meta @@ -0,0 +1,33 @@ +fileFormatVersion: 2 +guid: 4db51436488319e45a2ca4dabf4edc99 +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + Any: + second: + enabled: 1 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Unity.pdb b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.pdb new file mode 100644 index 0000000..886cdd1 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.pdb differ diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Unity.pdb.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.pdb.meta new file mode 100644 index 0000000..4f5c9d8 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.pdb.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: ffb2d4af7be12054db81762b2d585887 +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Unity.xml b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.xml new file mode 100644 index 0000000..dda424b --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.xml @@ -0,0 +1,8 @@ + + + + DotNetty.Unity + + + + diff --git a/Runtime/csharp-kcp/Plugins/DotNetty.Unity.xml.meta b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.xml.meta new file mode 100644 index 0000000..bf6366e --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/DotNetty.Unity.xml.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 6957bfcee5d34ae4987f8c9817282acd +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/LICENSE.txt b/Runtime/csharp-kcp/Plugins/LICENSE.txt new 
file mode 100644 index 0000000..785da82 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/LICENSE.txt @@ -0,0 +1,45 @@ +MIT License + +Copyright (c) 2018 Clark Yang + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +Copyright (c) Microsoft Corporation +All rights reserved. + +MIT License + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/Runtime/csharp-kcp/Plugins/LICENSE.txt.meta b/Runtime/csharp-kcp/Plugins/LICENSE.txt.meta new file mode 100644 index 0000000..abddea6 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/LICENSE.txt.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: 38df922502fef2a4d8e81048b1d7ef61 +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.dll b/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.dll new file mode 100644 index 0000000..51974c7 Binary files /dev/null and b/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.dll differ diff --git a/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.dll.meta b/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.dll.meta new file mode 100644 index 0000000..e40bae4 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.dll.meta @@ -0,0 +1,33 @@ +fileFormatVersion: 2 +guid: 1b39c08ffc7e2004495f915f82d909ef +PluginImporter: + externalObjects: {} + serializedVersion: 2 + iconMap: {} + executionOrder: {} + defineConstraints: [] + isPreloaded: 0 + isOverridable: 0 + isExplicitlyReferenced: 0 + validateReferences: 1 + platformData: + - first: + Any: + second: + enabled: 1 + settings: {} + - first: + Editor: Editor + second: + enabled: 0 + settings: + DefaultValueInitialized: true + - first: + Windows Store Apps: WindowsStoreApps + second: + enabled: 0 + settings: + CPU: AnyCPU + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.xml b/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.xml new file mode 100644 index 0000000..84c0213 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.xml @@ -0,0 +1,200 @@ + + + System.Runtime.CompilerServices.Unsafe + + + + Contains generic, low-level functionality for manipulating pointers. + + + Adds an element offset to the given reference. + The reference to add the offset to. + The offset to add. + The type of reference. + A new reference that reflects the addition of offset to pointer. + + + Adds an element offset to the given reference. + The reference to add the offset to. + The offset to add. + The type of reference. + A new reference that reflects the addition of offset to pointer. + + + Adds a byte offset to the given reference. + The reference to add the offset to. + The offset to add. + The type of reference. + A new reference that reflects the addition of byte offset to pointer. + + + Determines whether the specified references point to the same location. + The first reference to compare. + The second reference to compare. + The type of reference. + true if left and right point to the same location; otherwise, false. + + + Casts the given object to the specified type. + The object to cast. + The type which the object will be cast to. + The original object, casted to the given type. + + + Reinterprets the given reference as a reference to a value of type TTo. + The reference to reinterpret. + The type of reference to reinterpret.. + The desired type of the reference. + A reference to a value of type TTo. + + + Returns a pointer to the given by-ref parameter. + The object whose pointer is obtained. + The type of object. + A pointer to the given value. + + + Reinterprets the given location as a reference to a value of type T. + The location of the value to reference. 
+ The type of the interpreted location. + A reference to a value of type T. + + + Determines the byte offset from origin to target from the given references. + The reference to origin. + The reference to target. + The type of reference. + Byte offset from origin to target i.e. target - origin. + + + Copies a value of type T to the given location. + The location to copy to. + A reference to the value to copy. + The type of value to copy. + + + Copies a value of type T to the given location. + The location to copy to. + A pointer to the value to copy. + The type of value to copy. + + + Copies bytes from the source address to the destination address. + The destination address to copy to. + The source address to copy from. + The number of bytes to copy. + + + Copies bytes from the source address to the destination address. + The destination address to copy to. + The source address to copy from. + The number of bytes to copy. + + + Copies bytes from the source address to the destination address +without assuming architecture dependent alignment of the addresses. + The destination address to copy to. + The source address to copy from. + The number of bytes to copy. + + + Copies bytes from the source address to the destination address +without assuming architecture dependent alignment of the addresses. + The destination address to copy to. + The source address to copy from. + The number of bytes to copy. + + + Initializes a block of memory at the given location with a given initial value. + The address of the start of the memory block to initialize. + The value to initialize the block to. + The number of bytes to initialize. + + + Initializes a block of memory at the given location with a given initial value. + The address of the start of the memory block to initialize. + The value to initialize the block to. + The number of bytes to initialize. + + + Initializes a block of memory at the given location with a given initial value +without assuming architecture dependent alignment of the address. + The address of the start of the memory block to initialize. + The value to initialize the block to. + The number of bytes to initialize. + + + Initializes a block of memory at the given location with a given initial value +without assuming architecture dependent alignment of the address. + The address of the start of the memory block to initialize. + The value to initialize the block to. + The number of bytes to initialize. + + + Reads a value of type T from the given location. + The location to read from. + The type to read. + An object of type T read from the given location. + + + Reads a value of type T from the given location +without assuming architecture dependent alignment of the addresses. + The location to read from. + The type to read. + An object of type T read from the given location. + + + Reads a value of type T from the given location +without assuming architecture dependent alignment of the addresses. + The location to read from. + The type to read. + An object of type T read from the given location. + + + Returns the size of an object of the given type parameter. + The type of object whose size is retrieved. + The size of an object of type T. + + + Subtracts an element offset from the given reference. + The reference to subtract the offset from. + The offset to subtract. + The type of reference. + A new reference that reflects the subraction of offset from pointer. + + + Subtracts an element offset from the given reference. + The reference to subtract the offset from. 
+ The offset to subtract. + The type of reference. + A new reference that reflects the subraction of offset from pointer. + + + Subtracts a byte offset from the given reference. + The reference to subtract the offset from. + + The type of reference. + A new reference that reflects the subraction of byte offset from pointer. + + + Writes a value of type T to the given location. + The location to write to. + The value to write. + The type of value to write. + + + Writes a value of type T to the given location +without assuming architecture dependent alignment of the addresses. + The location to write to. + The value to write. + The type of value to write. + + + Writes a value of type T to the given location +without assuming architecture dependent alignment of the addresses. + The location to write to. + The value to write. + The type of value to write. + + + \ No newline at end of file diff --git a/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.xml.meta b/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.xml.meta new file mode 100644 index 0000000..0572801 --- /dev/null +++ b/Runtime/csharp-kcp/Plugins/System.Runtime.CompilerServices.Unsafe.xml.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: ee1b2eabd1c88a0439ed7e7bb6549555 +TextScriptImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp.meta b/Runtime/csharp-kcp/base-kcp.meta new file mode 100644 index 0000000..55d60f1 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: d22813da2a04d7d4c80cb6234a7f1af5 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/DelayPacket.cs b/Runtime/csharp-kcp/base-kcp/DelayPacket.cs new file mode 100644 index 0000000..6d840c1 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/DelayPacket.cs @@ -0,0 +1,42 @@ +using System; +using DotNetty.Buffers; + +namespace base_kcp +{ + public class DelayPacket + { + private long ts; + private IByteBuffer ptr; + + + public void init(IByteBuffer src) + { + this.ptr = src.RetainedSlice(); + } + + + public long getTs() + { + return ts; + } + + public void setTs(long ts) + { + this.ts = ts; + } + + public IByteBuffer getPtr() + { + return ptr; + } + + public void setPtr(IByteBuffer ptr) + { + this.ptr = ptr; + } + + public void Release(){ + ptr.Release(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/DelayPacket.cs.meta b/Runtime/csharp-kcp/base-kcp/DelayPacket.cs.meta new file mode 100644 index 0000000..3730466 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/DelayPacket.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 3bc23bb97905852478ba13fd802217ea +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/Kcp.cs b/Runtime/csharp-kcp/base-kcp/Kcp.cs new file mode 100644 index 0000000..1c87f09 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/Kcp.cs @@ -0,0 +1,1841 @@ +using System; +using System.Collections; +using System.Collections.Generic; +using System.Linq; +using DotNetty.Buffers; +using fec; +using Object = System.Object; + +namespace base_kcp +{ + public class Kcp + { + /** + * no delay min rto + */ + public const int IKCP_RTO_NDL = 30; + + /** + * normal min rto + */ + public const int IKCP_RTO_MIN = 100; + + public const 
int IKCP_RTO_DEF = 200; + + public const int IKCP_RTO_MAX = 60000; + + /** + * cmd: push data + */ + public const byte IKCP_CMD_PUSH = 81; + + /** + * cmd: ack + */ + public const byte IKCP_CMD_ACK = 82; + + /** + * cmd: window probe (ask) + * 询问对方当前剩余窗口大小 请求 + */ + public const byte IKCP_CMD_WASK = 83; + + /** + * cmd: window size (tell) + * 返回本地当前剩余窗口大小 + */ + public const byte IKCP_CMD_WINS = 84; + + /** + * need to send IKCP_CMD_WASK + */ + public const int IKCP_ASK_SEND = 1; + + /** + * need to send IKCP_CMD_WINS + */ + public const int IKCP_ASK_TELL = 2; + + public const int IKCP_WND_SND = 32; + + public const int IKCP_WND_RCV = 32; + + public const int IKCP_MTU_DEF = 1400; + + public const int IKCP_INTERVAL = 100; + + public int IKCP_OVERHEAD = 24; + + public const int IKCP_DEADLINK = 20; + + public const int IKCP_THRESH_INIT = 2; + + public const int IKCP_THRESH_MIN = 2; + + /** + * 7 secs to probe window size + */ + public const int IKCP_PROBE_INIT = 7000; + + /** + * up to 120 secs to probe window + */ + public const int IKCP_PROBE_LIMIT = 120000; + + public const int IKCP_SN_OFFSET = 12; + + + private int ackMaskSize = 0; + + /**会话id**/ + private int conv; + + /**最大传输单元**/ + private int mtu = IKCP_MTU_DEF; + + /**最大分节大小 mtu减去头等部分**/ + private int mss = 0; + + /**状态**/ + private int state; + + /**已发送但未确认**/ + private long sndUna; + + /**下次发送下标**/ + private long sndNxt; + + /**下次接收下标**/ + private long rcvNxt; + + /**上次ack时间**/ + private long tsLastack; + + /**慢启动门限**/ + private int ssthresh = IKCP_THRESH_INIT; + + /**RTT(Round Trip Time)**/ + private int rxRttval; + + /**SRTT平滑RTT*/ + private int rxSrtt; + + /**RTO重传超时*/ + private int rxRto = IKCP_RTO_DEF; + + /**MinRTO最小重传超时*/ + private int rxMinrto = IKCP_RTO_MIN; + + /**发送窗口**/ + private int sndWnd = IKCP_WND_SND; + + /**接收窗口**/ + private int rcvWnd = IKCP_WND_RCV; + + /**当前对端可接收窗口**/ + private int rmtWnd = IKCP_WND_RCV; + + /**拥塞控制窗口**/ + private int cwnd; + + /**探测标志位**/ + private int probe; + + ///**当前时间**/ + //private long current; + /**间隔**/ + private int interval = IKCP_INTERVAL; + + /**发送**/ + private long tsFlush = IKCP_INTERVAL; + + /**是否无延迟 0不启用;1启用**/ + private bool nodelay; + + /**状态是否已更新**/ + private bool updated; + + /**探测时间**/ + private long tsProbe; + + /**探测等待**/ + private int probeWait; + + /**死连接 重传达到该值时认为连接是断开的**/ + private int deadLink = IKCP_DEADLINK; + + /**拥塞控制增量**/ + private int incr; + + /**收到包立即回ack**/ + private bool ackNoDelay; + + /**待发送窗口窗口**/ + private Queue sndQueue = new Queue(); + + /**收到后有序的队列**/ + private LinkedList rcvQueue = new LinkedList(); + + /**发送后待确认的队列**/ + public LinkedList sndBuf = new LinkedList(); + + /**收到的消息 无序的**/ + private LinkedList rcvBuf = new LinkedList(); + + private long[] acklist = new long[8]; + + private int ackcount; + + private Object user; + + /**是否快速重传 默认0关闭,可以设置2(2次ACK跨越将会直接重传)**/ + private int fastresend; + + /**是否关闭拥塞控制窗口**/ + private bool nocwnd; + + /**是否流传输**/ + private bool stream; + + /**头部预留长度 为fec checksum准备**/ + private int reserved; + + private KcpOutput output; + + private IByteBufferAllocator byteBufAllocator = PooledByteBufferAllocator.Default; + + /**ack二进制标识**/ + private long ackMask; + private long lastRcvNxt; + + + private static long long2Uint(long n) + { + return n & 0x00000000FFFFFFFFL; + } + + private static int ibound(int lower, int middle, int upper) + { + return Math.Min(Math.Max(lower, middle), upper); + } + + private static int itimediff(long later, long earlier) + { + return (int) (later - earlier); + } + + private static void 
outPut(IByteBuffer data, Kcp kcp) + { +// if (log.isDebugEnabled()) { +// log.debug("{} [RO] {} bytes", kcp, data.readableBytes()); +// } + if (data.ReadableBytes == 0) + { + return; + } + + kcp.output.outPut(data, kcp); + } + + + private static void encodeSeg(IByteBuffer buf, Segment seg) + { + int offset = buf.WriterIndex; + + buf.WriteIntLE(seg.Conv); + buf.WriteByte(seg.Cmd); + buf.WriteByte(seg.Frg); + buf.WriteShortLE(seg.Wnd); + buf.WriteIntLE((int) seg.Ts); + buf.WriteIntLE((int) seg.Sn); + buf.WriteIntLE((int) seg.Una); + buf.WriteIntLE(seg.Data.ReadableBytes); + switch (seg.AckMaskSize) + { + case 8: + buf.WriteByte((int) seg.AckMask); + break; + case 16: + buf.WriteShortLE((int) seg.AckMask); + break; + case 32: + buf.WriteIntLE((int) seg.AckMask); + break; + case 64: + buf.WriteLongLE(seg.AckMask); + break; + } + + Snmp.snmp.OutSegs++; + } + + public Kcp(int conv, KcpOutput output) + { + this.conv = conv; + this.output = output; + this.mss = mtu - IKCP_OVERHEAD; + } + + public void release() + { + release(sndBuf); + release(rcvBuf); + release(sndQueue); + release(rcvQueue); + } + + private void release(ICollection segQueue) + { + foreach (Segment seg in segQueue) + { + seg.recycle(true); + } + } + + private IByteBuffer createFlushByteBuf() + { + return byteBufAllocator.DirectBuffer(this.mtu); + } + + public IByteBuffer mergeRecv() + { + if (rcvQueue.Count == 0) + { + return null; + } + + int peek = peekSize(); + + if (peek < 0) + { + return null; + } + + + bool recover = rcvQueue.Count >= rcvWnd; + + IByteBuffer byteBuf = null; + + // merge fragment + int len = 0; + + var itr = rcvQueue.First; + while (itr != null) + { + Segment seg = itr.Value; + var next = itr.Next; + rcvQueue.Remove(itr); + itr = next; + + len += seg.Data.ReadableBytes; + int fragment = seg.Frg; + if (byteBuf == null) + { + if (fragment == 0) + { + byteBuf = seg.Data; + seg.recycle(false); + break; + } + + byteBuf = byteBufAllocator.DirectBuffer(len); + } + + byteBuf.WriteBytes(seg.Data); + seg.recycle(true); + if (fragment == 0) + { + break; + } + } + + // move available data from rcv_buf -> rcv_queue + moveRcvData(); + // fast recover + if (rcvQueue.Count < rcvWnd && recover) + { + // ready to send back IKCP_CMD_WINS in ikcp_flush + // tell remote my window size + probe |= IKCP_ASK_TELL; + } + + return byteBuf; + } + + + /** + * 1,判断是否有完整的包,如果有就抛给下一层 + * 2,整理消息接收队列,判断下一个包是否已经收到 收到放入rcvQueue + * 3,判断接收窗口剩余是否改变,如果改变记录需要通知 + * @param bufList + * @return + */ + public int recv(List bufList) + { + if (rcvQueue.Count == 0) + { + return -1; + } + + int peek = peekSize(); + + if (peek < 0) + { + return -2; + } + + //接收队列长度大于接收窗口?比如接收窗口是32个包,目前已经满32个包了,需要在恢复的时候告诉对方 + bool recover = rcvQueue.Count >= rcvWnd; + + // merge fragment + int len = 0; + + + var itr = rcvQueue.First; + + while (itr != null) + { + Segment seg = itr.Value; + var next = itr.Next; + rcvQueue.Remove(itr); + itr = next; + + len += seg.Data.ReadableBytes; + bufList.Add(seg.Data); + + int fragment = seg.Frg; + + seg.recycle(false); + + if (fragment == 0) + { + break; + } + } + + // move available data from rcv_buf -> rcv_queue + moveRcvData(); + + // fast recover接收队列长度小于接收窗口,说明还可以接数据,已经恢复了,在下次发包的时候告诉对方本方的窗口 + if (rcvQueue.Count < rcvWnd && recover) + { + // ready to send back IKCP_CMD_WINS in ikcp_flush + // tell remote my window size + probe |= IKCP_ASK_TELL; + } + + return len; + } + + + /** + * check the size of next message in the recv queue + * 检查接收队列里面是否有完整的一个包,如果有返回该包的字节长度 + * @return -1 没有完整包, >0 一个完整包所含字节 + */ + public int peekSize() + { + 
if (rcvQueue.Count == 0) + { + return -1; + } + + Segment seg = rcvQueue.First(); + //第一个包是一条应用层消息的最后一个分包?一条消息只有一个包的情况? + if (seg.Frg == 0) + { + return seg.Data.ReadableBytes; + } + + //接收队列长度小于应用层消息分包数量?接收队列空间不够用于接收完整的一个消息? + if (rcvQueue.Count < seg.Frg + 1) + { + // Some segments have not arrived yet + return -1; + } + + int len = 0; + var itr = rcvQueue.First; + while (itr != null) + { + var s = itr.Value; + len += s.Data.ReadableBytes; + if (s.Frg == 0) + { + break; + } + + itr = itr.Next; + } + + return len; + } + + + /** + * 判断一条消息是否完整收全了 + * @return + */ + public bool canRecv() + { + if (rcvQueue.Count == 0) + { + return false; + } + + Segment seg = rcvQueue.First(); + if (seg.Frg == 0) + { + return true; + } + + if (rcvQueue.Count < seg.Frg + 1) + { + // Some segments have not arrived yet + return false; + } + + return true; + } + + + public int send(IByteBuffer buf) + { + int len = buf.ReadableBytes; + if (len == 0) + { + return -1; + } + + // append to previous segment in streaming mode (if possible) + if (stream) + { + if (sndQueue.Count > 0) + { + Segment last = sndQueue.Last(); + IByteBuffer lastData = last.Data; + int lastLen = lastData.ReadableBytes; + if (lastLen < mss) + { + int capacity = mss - lastLen; + int extend = len < capacity ? len : capacity; + if (lastData.MaxWritableBytes < extend) + { + // extend + IByteBuffer newBuf = byteBufAllocator.DirectBuffer(lastLen + extend); + newBuf.WriteBytes(lastData); + lastData.Release(); + lastData = last.Data = newBuf; + } + + lastData.WriteBytes(buf, extend); + + len = buf.ReadableBytes; + if (len == 0) + { + return 0; + } + } + } + } + + int count; + if (len <= mss) + { + count = 1; + } + else + { + count = (len + mss - 1) / mss; + } + + if (count > 255) + { + // Maybe don't need the conditon in stream mode + return -2; + } + + if (count == 0) + { + // impossible + count = 1; + } + + // segment + for (int i = 0; i < count; i++) + { + int size = len > mss ? mss : len; + Segment seg = Segment.createSegment(buf.ReadRetainedSlice(size)); + seg.Frg = (short) (stream ? 0 : count - i - 1); + sndQueue.Enqueue(seg); +// sndQueue.add(seg); + len = buf.ReadableBytes; + } + + return 0; + } + + /** + * update ack. 
+ * parse ack根据RTT计算SRTT和RTO即重传超时 + * @param rtt + */ + private void updateAck(int rtt) + { + if (rxSrtt == 0) + { + rxSrtt = rtt; + rxRttval = rtt >> 2; + } + else + { + int delta = rtt - rxSrtt; + rxSrtt += delta >> 3; + delta = Math.Abs(delta); + if (rtt < rxSrtt - rxRttval) + { + rxRttval += (delta - rxRttval) >> 5; + } + else + { + rxRttval += (delta - rxRttval) >> 2; + } + + //int delta = rtt - rxSrtt; + //if (delta < 0) { + // delta = -delta; + //} + //rxRttval = (3 * rxRttval + delta) / 4; + //rxSrtt = (7 * rxSrtt + rtt) / 8; + //if (rxSrtt < 1) { + // rxSrtt = 1; + //} + } + + int rto = rxSrtt + Math.Max(interval, rxRttval << 2); + rxRto = ibound(rxMinrto, rto, IKCP_RTO_MAX); + } + + private void shrinkBuf() + { + if (sndBuf.Count > 0) + { + Segment seg = sndBuf.First(); + sndUna = seg.Sn; + } + else + { + sndUna = sndNxt; + } + } + + private void parseAck(long sn) + { + if (itimediff(sn, sndUna) < 0 || itimediff(sn, sndNxt) >= 0) + { + return; + } + + var itr = sndBuf.First; + while (itr != null) + { + var next = itr.Next; + Segment seg = itr.Value; + if (sn == seg.Sn) + { + sndBuf.Remove(itr); + seg.recycle(true); + break; + } + + if (itimediff(sn, seg.Sn) < 0) + { + break; + } + + itr = next; + } + } + + private void parseUna(long una) + { + var itr = sndBuf.First; + while (itr != null) + { + var next = itr.Next; + Segment seg = itr.Value; + if (itimediff(una, seg.Sn) > 0) + { + sndBuf.Remove(itr); + seg.recycle(true); + } + else + { + break; + } + + itr = next; + } + } + + private void parseAckMask(long una, long ackMask) + { + if (ackMask == 0) + { + return; + } + + var itr = sndBuf.First; + while (itr != null) + { + var next = itr.Next; + Segment seg = itr.Value; + int index = (int) (seg.Sn - una - 1); + if (index < 0) + { + continue; + } + + if (index >= ackMaskSize) + break; + long mask = ackMask & 1 << index; + if (mask != 0) + { + sndBuf.Remove(itr); + seg.recycle(true); + } + + itr = next; + } + } + + + private void parseFastack(long sn, long ts) + { + if (itimediff(sn, sndUna) < 0 || itimediff(sn, sndNxt) >= 0) + { + return; + } + + foreach (var seg in sndBuf) + { + if (itimediff(sn, seg.Sn) < 0) + { + break; + //根据时间判断 在当前包时间之前的包才能被认定是需要快速重传的 + } + else if (sn != seg.Sn && itimediff(seg.Ts, ts) <= 0) + { + seg.Fastack++; + } + } + } + + + private void ackPush(long sn, long ts) + { + int newSize = 2 * (ackcount + 1); + + if (newSize > acklist.Count()) + { + int newCapacity = acklist.Count() << 1; // double capacity + +// if (newCapacity < 0) { +// throw new OutOfMemoryError(); +// } + + long[] newArray = new long[newCapacity]; + Array.Copy(acklist, 0, newArray, 0, acklist.Count()); + this.acklist = newArray; + } + + acklist[2 * ackcount] = sn; + acklist[2 * ackcount + 1] = ts; + ackcount++; + } + + private bool parseData(Segment newSeg) + { + long sn = newSeg.Sn; + + if (itimediff(sn, rcvNxt + rcvWnd) >= 0 || itimediff(sn, rcvNxt) < 0) + { + newSeg.recycle(true); + return true; + } + + bool repeat = false; + bool findPos = false; + + var last = rcvBuf.Last; + while (last != null) + { + var front = last.Previous; + Segment seg = last.Value; + if (seg.Sn == sn) + { + repeat = true; + //Snmp.snmp.RepeatSegs.incrementAndGet(); + break; + } + + if (itimediff(sn, seg.Sn) > 0) + { + findPos = true; + break; + } + + if (front == null) + { + break; + } + + last = front; + } + + if (repeat) + { + newSeg.recycle(true); + } + else if (last == null) + { + rcvBuf.AddLast(newSeg); + } + else if(findPos) + { + rcvBuf.AddAfter(last, newSeg); + } + else + { + rcvBuf.AddFirst(newSeg); + 
} + +// var firstSn = rcvBuf.First.Value.Sn; +// foreach (var segment in rcvBuf) +// { +// if (segment.Sn == firstSn) +// continue; +// firstSn++; +// if (firstSn != segment.Sn) +// { +// Console.WriteLine(); +// } +// +// +// } + + + + // move available data from rcv_buf -> rcv_queue + moveRcvData(); // Invoke the method only if the segment is not repeat? + return repeat; + } + + private void moveRcvData() + { + var itr = rcvBuf.First; + while (itr != null) + { + var next = itr.Next; + Segment seg = itr.Value; + if (seg.Sn == rcvNxt && rcvQueue.Count < rcvWnd) + { + rcvBuf.Remove(itr); + rcvQueue.AddLast(seg); + rcvNxt++; + } + else + { + break; + } + + itr = next; + } + } + + + public int input(IByteBuffer data, bool regular, long current) + { + long oldSndUna = sndUna; + if (data == null || data.ReadableBytes < IKCP_OVERHEAD) + { + return -1; + } +// if (log.isDebugEnabled()) { +// log.debug("{} [RI] {} bytes", this, data.readableBytes()); +// } + + + long latest = 0; // latest packet + bool flag = false; + int inSegs = 0; + + + long uintCurrent = long2Uint(current); + + while (true) + { + int conv, len, wnd; + long ts, sn, una, ackMask; + byte cmd; + short frg; + Segment seg; + + if (data.ReadableBytes < IKCP_OVERHEAD) + { + break; + } + + conv = data.ReadIntLE(); + if (conv != this.conv) + { + return -4; + } + + cmd = data.ReadByte(); + frg = data.ReadByte(); + wnd = data.ReadUnsignedShortLE(); + ts = data.ReadUnsignedIntLE(); + sn = data.ReadUnsignedIntLE(); + una = data.ReadUnsignedIntLE(); + len = data.ReadIntLE(); + + + switch (ackMaskSize) + { + case 8: + ackMask = data.ReadByte(); + break; + case 16: + ackMask = data.ReadUnsignedShortLE(); + break; + case 32: + ackMask = data.ReadUnsignedIntLE(); + break; + case 64: + //TODO need unsignedLongLe + ackMask = data.ReadLongLE(); + break; + default: + ackMask = 0; + break; + } + + ; + + + if (data.ReadableBytes < len) + { + return -2; + } + + if (cmd != IKCP_CMD_PUSH && cmd != IKCP_CMD_ACK && cmd != IKCP_CMD_WASK && cmd != IKCP_CMD_WINS) + { + return -3; + } + + //最后收到的来计算远程窗口大小 + if (regular) + { + this.rmtWnd = wnd; //更新远端窗口大小删除已确认的包,una以前的包对方都收到了,可以把本地小于una的都删除掉 + } + + //this.rmtWnd = wnd; + parseUna(una); + shrinkBuf(); + + + bool readed = false; + switch (cmd) + { + case IKCP_CMD_ACK: + { + parseAck(sn); + parseFastack(sn, ts); + flag = true; + latest = ts; + int rtt = itimediff(uintCurrent, ts); +// Debug.Log(GetHashCode()+" input ack: sn="+sn+", rtt="+rtt+", rto="+rxRto+",regular="+regular); +// if (log.isDebugEnabled()) { +// log.debug("{} input ack: sn={}, rtt={}, rto={} ,regular={}", this, sn, rtt, rxRto,regular); +// } + break; + } + case IKCP_CMD_PUSH: + { + bool repeat = true; + if (itimediff(sn, rcvNxt + rcvWnd) < 0) + { + ackPush(sn, ts); + if (itimediff(sn, rcvNxt) >= 0) + { + if (len > 0) + { + seg = Segment.createSegment(data.ReadRetainedSlice(len)); + readed = true; + } + else + { + seg = Segment.createSegment(byteBufAllocator, 0); + } + + seg.Conv = conv; + seg.Cmd = cmd; + seg.Frg = frg; + seg.Wnd = wnd; + seg.Ts = ts; + seg.Sn = sn; + seg.Una = una; + repeat = parseData(seg); + } + } + + if (regular && repeat) + { + Snmp.snmp.RepeatSegs++; + } + +// if (log.isDebugEnabled()) { +// log.debug("{} input push: sn={}, una={}, ts={},regular={}", this, sn, una, ts,regular); +// } + +// Console.WriteLine(GetHashCode()+" input push: sn="+sn+", una="+una+", ts="+ts+",regular="+regular); + break; + } + case IKCP_CMD_WASK: + { + // ready to send back IKCP_CMD_WINS in ikcp_flush + // tell remote my window size + probe |= 
IKCP_ASK_TELL; +// if (log.isDebugEnabled()) { +// log.debug("{} input ask", this); +// } + break; + } + case IKCP_CMD_WINS: + { + // do nothing +// if (log.isDebugEnabled()) { +// log.debug("{} input tell: {}", this, wnd); +// } + break; + } + default: + return -3; + } + + parseAckMask(una, ackMask); + + if (!readed) + { + data.SkipBytes(len); + } + + inSegs++; + } + +// if (data.ReadableBytes > 0) +// { +// Console.WriteLine("ReadableBytes"+data.ReadableBytes); +// } + + Snmp.snmp.InSegs += inSegs; + + if (flag && regular) + { + int rtt = itimediff(uintCurrent, latest); + if (rtt >= 0) + { + updateAck(rtt); //收到ack包,根据ack包的时间计算srtt和rto + } + } + + if (!nocwnd) + { + if (itimediff(sndUna, oldSndUna) > 0) + { + if (cwnd < rmtWnd) + { + int mss = this.mss; + if (cwnd < ssthresh) + { + cwnd++; + incr += mss; + } + else + { + if (incr < mss) + { + incr = mss; + } + + incr += (mss * mss) / incr + (mss / 16); + if ((cwnd + 1) * mss <= incr) + { + cwnd++; + } + } + + if (cwnd > rmtWnd) + { + cwnd = rmtWnd; + incr = rmtWnd * mss; + } + } + } + } + + + if (ackNoDelay && ackcount > 0) + { + // ack immediately + flush(true, current); + } + + return 0; + } + + + private int wndUnused() + { + if (rcvQueue.Count < rcvWnd) + { + return rcvWnd - rcvQueue.Count; + } + + return 0; + } + + + private IByteBuffer makeSpace(IByteBuffer buffer, int space) + { + if (buffer.ReadableBytes + space > mtu) + { + outPut(buffer, this); + buffer = createFlushByteBuf(); + buffer.SetWriterIndex(reserved); + } + + return buffer; + } + + private void flushBuffer(IByteBuffer buffer) + { + if (buffer.ReadableBytes > reserved) + { + outPut(buffer, this); + return; + } + + buffer.Release(); + } + + + private readonly long startTicks = DateTime.Now.Ticks; + + public long currentMs() + { + long currentTicks = DateTime.Now.Ticks; + return (currentTicks - startTicks) / TimeSpan.TicksPerMillisecond; + } + + /** + * ikcp_flush + */ + public long flush(bool ackOnly, long current) + { + // 'ikcp_update' haven't been called. 
+ //if (!updated) { + // return; + //} + + //long current = this.current; + //long uintCurrent = long2Uint(current); + + Segment seg = Segment.createSegment(byteBufAllocator, 0); + seg.Conv = conv; + seg.Cmd = IKCP_CMD_ACK; + seg.AckMaskSize = this.ackMaskSize; + seg.Wnd = wndUnused(); //可接收数量 + seg.Una = rcvNxt; //已接收数量,下次要接收的包的sn,这sn之前的包都已经收到 + + IByteBuffer buffer = createFlushByteBuf(); + buffer.SetWriterIndex(reserved); + + + //计算ackMask + int count = ackcount; + + if (lastRcvNxt != rcvNxt) + { + ackMask = 0; + lastRcvNxt = rcvNxt; + } + + for (int i = 0; i < count; i++) + { + long sn = acklist[i * 2]; + if (sn < rcvNxt) + continue; + int index = (int) (sn - rcvNxt - 1); + if (index >= ackMaskSize) + break; + if (index >= 0) + { + ackMask |= 1 << index; + } + } + + seg.AckMask = ackMask; + + + // flush acknowledges有收到的包需要确认,则发确认包 + for (int i = 0; i < count; i++) + { + buffer = makeSpace(buffer, IKCP_OVERHEAD); + long sn = acklist[i * 2]; + if (sn >= rcvNxt || count - 1 == i) + { + seg.Sn = sn; + seg.Ts = acklist[i * 2 + 1]; + encodeSeg(buffer, seg); + +// Console.WriteLine(GetHashCode()+"flush ack: sn="+seg.Sn+", ts="+seg.Ts+" ,count="+count+" Una"+seg.Una); + +// if (log.isDebugEnabled()) { +// log.debug("{} flush ack: sn={}, ts={} ,count={}", this, seg.sn, seg.ts,count); +// } + } + } + + ackcount = 0; + + + if (ackOnly) + { + flushBuffer(buffer); + seg.recycle(true); + return interval; + } + + // probe window size (if remote window size equals zero) + //拥堵控制 如果对方可接受窗口大小为0 需要询问对方窗口大小 + if (rmtWnd == 0) + { + current = currentMs(); + if (probeWait == 0) + { + probeWait = IKCP_PROBE_INIT; + tsProbe = current + probeWait; + } + else + { + if (itimediff(current, tsProbe) >= 0) + { + if (probeWait < IKCP_PROBE_INIT) + { + probeWait = IKCP_PROBE_INIT; + } + + probeWait += probeWait / 2; + if (probeWait > IKCP_PROBE_LIMIT) + { + probeWait = IKCP_PROBE_LIMIT; + } + + tsProbe = current + probeWait; + probe |= IKCP_ASK_SEND; + } + } + } + else + { + tsProbe = 0; + probeWait = 0; + } + + // flush window probing commands + if ((probe & IKCP_ASK_SEND) != 0) + { + seg.Cmd = IKCP_CMD_WASK; + buffer = makeSpace(buffer, IKCP_OVERHEAD); + encodeSeg(buffer, seg); +// if (log.isDebugEnabled()) { +// log.debug("{} flush ask", this); +// } + } + + // flush window probing commands + if ((probe & IKCP_ASK_TELL) != 0) + { + seg.Cmd = IKCP_CMD_WINS; + buffer = makeSpace(buffer, IKCP_OVERHEAD); + encodeSeg(buffer, seg); +// if (log.isDebugEnabled()) { +// log.debug("{} flush tell: wnd={}", this, seg.wnd); +// } + } + + probe = 0; + + // calculate window size + int cwnd0 = Math.Min(sndWnd, rmtWnd); + if (!nocwnd) + { + cwnd0 = Math.Min(this.cwnd, cwnd0); + } + + int newSegsCount = 0; + // move data from snd_queue to snd_buf + while (itimediff(sndNxt, sndUna + cwnd0) < 0) + { + if (sndQueue.Count == 0) + { + break; + } + var newSeg = sndQueue.Dequeue(); + newSeg.Conv = conv; + newSeg.Cmd = IKCP_CMD_PUSH; + newSeg.Sn = sndNxt; + sndBuf.AddLast(newSeg); +// sndBuf.AddLast(newSeg); + sndNxt++; + newSegsCount++; + } + + // calculate resent + int resent = fastresend > 0 ? 
fastresend : 0x7fffffff; + + // flush data segments + current = currentMs(); + int change = 0; + bool lost = false; + int lostSegs = 0, fastRetransSegs = 0, earlyRetransSegs = 0; + long minrto = interval; + var itr = sndBuf.First; + + while (itr != null) + { + var next = itr.Next; + Segment segment = itr.Value; + itr = next; + + bool needsend = false; + if (segment.Xmit == 0) + { + needsend = true; + segment.Rto = rxRto; + segment.Resendts = current + segment.Rto; +// if (log.isDebugEnabled()) { +// log.debug("{} flush data: sn={}, resendts={}", this, segment.sn, (segment.resendts - current)); +// } + } + else if (segment.Fastack >= resent) + { + needsend = true; + segment.Fastack = 0; + segment.Rto = rxRto; + segment.Resendts = current + segment.Rto; + change++; + fastRetransSegs++; +// if (log.isDebugEnabled()) { +// log.debug("{} fastresend. sn={}, xmit={}, resendts={} ", this, segment.sn, segment.xmit, (segment +// .resendts - current)); +// } + } + else if (segment.Fastack > 0 && newSegsCount == 0) + { + // early retransmit + needsend = true; + segment.Fastack = 0; + segment.Rto = rxRto; + segment.Resendts = current + segment.Rto; + change++; + earlyRetransSegs++; + } + else if (itimediff(current, segment.Resendts) >= 0) + { + needsend = true; + if (!nodelay) + { + segment.Rto += rxRto; + } + else + { + segment.Rto += rxRto / 2; + } + + segment.Fastack = 0; + segment.Resendts = current + segment.Rto; + lost = true; + lostSegs++; +// if (log.isDebugEnabled()) { +// log.debug("{} resend. sn={}, xmit={}, resendts={}", this, segment.sn, segment.xmit, (segment +// .resendts - current)); +// } + } + + + if (needsend) + { + segment.Xmit++; + segment.Ts = long2Uint(current); + segment.Wnd = seg.Wnd; + segment.Una = rcvNxt; + segment.AckMaskSize = this.ackMaskSize; + segment.AckMask = ackMask; + + IByteBuffer segData = segment.Data; + int segLen = segData.ReadableBytes; + int need = IKCP_OVERHEAD + segLen; + buffer = makeSpace(buffer, need); + + //if (buffer.readableBytes() + need > mtu) { + // output(buffer, this); + // buffer = createFlushByteBuf(); + //} + + encodeSeg(buffer, segment); + + if (segLen > 0) + { + // don't increases data's readerIndex, because the data may be resend. 
+ buffer.WriteBytes(segData, segData.ReaderIndex, segLen); + } + + if (segment.Xmit >= deadLink) + { + state = -1; + } + + // get the nearest rto + long rto = itimediff(segment.Resendts, current); + if (rto > 0 && rto < minrto) + { + minrto = rto; + } + } + } + + // flash remain segments + flushBuffer(buffer); + seg.recycle(true); + + int sum = lostSegs; + if (lostSegs > 0) + { + Snmp.snmp.LostSegs += lostSegs; + } + + if (fastRetransSegs > 0) + { + Snmp.snmp.FastRetransSegs += fastRetransSegs; + sum += fastRetransSegs; + } + + if (earlyRetransSegs > 0) + { + Snmp.snmp.EarlyRetransSegs += earlyRetransSegs; + sum += earlyRetransSegs; + } + + if (sum > 0) + { + Snmp.snmp.RetransSegs += sum; + } + + // update ssthresh + if (!nocwnd) + { + if (change > 0) + { + int inflight = (int) (sndNxt - sndUna); + ssthresh = inflight / 2; + if (ssthresh < IKCP_THRESH_MIN) + { + ssthresh = IKCP_THRESH_MIN; + } + + cwnd = ssthresh + resent; + incr = cwnd * mss; + } + + if (lost) + { + ssthresh = cwnd0 / 2; + if (ssthresh < IKCP_THRESH_MIN) + { + ssthresh = IKCP_THRESH_MIN; + } + + cwnd = 1; + incr = mss; + } + + if (cwnd < 1) + { + cwnd = 1; + incr = mss; + } + } + + return minrto; + } + + + /** + * update getState (call it repeatedly, every 10ms-100ms), or you can ask + * ikcp_check when to call it again (without ikcp_input/_send calling). + * 'current' - current timestamp in millisec. + * + * @param current + */ + public void update(long current) + { + if (!updated) + { + updated = true; + tsFlush = current; + } + + int slap = itimediff(current, tsFlush); + + if (slap >= 10000 || slap < -10000) + { + tsFlush = current; + slap = 0; + } + + /*if (slap >= 0) { + tsFlush += setInterval; + if (itimediff(this.current, tsFlush) >= 0) { + tsFlush = this.current + setInterval; + } + flush(); + }*/ + + if (slap >= 0) + { + tsFlush += interval; + if (itimediff(current, tsFlush) >= 0) + { + tsFlush = current + interval; + } + } + else + { + tsFlush = current + interval; + } + + flush(false, current); + } + + /** + * Determine when should you invoke ikcp_update: + * returns when you should invoke ikcp_update in millisec, if there + * is no ikcp_input/_send calling. you can call ikcp_update in that + * time, instead of call update repeatly. + * Important to reduce unnacessary ikcp_update invoking. use it to + * schedule ikcp_update (eg. implementing an epoll-like mechanism, + * or optimize ikcp_update when handling massive kcp connections) + * + * @param current + * @return + */ + public long check(long current) + { + if (!updated) + { + return current; + } + + long tsFlush = this.tsFlush; + int slap = itimediff(current, tsFlush); + if (slap >= 10000 || slap < -10000) + { + tsFlush = current; + slap = 0; + } + + if (slap >= 0) + { + return current; + } + + int tmFlush = itimediff(tsFlush, current); + int tmPacket = 0x7fffffff; + + foreach (var seg in sndBuf) + { + int diff = itimediff(seg.Resendts, current); + if (diff <= 0) + { + return current; + } + + if (diff < tmPacket) + { + tmPacket = diff; + } + } + + + int minimal = tmPacket < tmFlush ? 
tmPacket : tmFlush; + if (minimal >= interval) + { + minimal = interval; + } + + return current + minimal; + } + + + public bool checkFlush() + { + if (ackcount > 0) + { + return true; + } + + if (probe != 0) + { + return true; + } + + if (sndBuf.Count > 0) + { + return true; + } + + if (sndQueue.Count > 0) + { + return true; + } + + return false; + } + + public int setMtu(int mtu) + { + if (mtu < IKCP_OVERHEAD || mtu < 50) + { + return -1; + } + + if (reserved >= mtu - IKCP_OVERHEAD || reserved < 0) + { + return -1; + } + + this.mtu = mtu; + this.mss = mtu - IKCP_OVERHEAD - reserved; + return 0; + } + + + public int setInterval(int interval) + { + if (interval > 5000) + { + interval = 5000; + } + else if (interval < 10) + { + interval = 10; + } + + this.interval = interval; + + return 0; + } + + public int initNodelay(bool nodelay, int interval, int resend, bool nc) + { + this.nodelay = nodelay; + if (nodelay) + { + this.rxMinrto = IKCP_RTO_NDL; + } + else + { + this.rxMinrto = IKCP_RTO_MIN; + } + + if (interval >= 0) + { + if (interval > 5000) + { + interval = 5000; + } + else if (interval < 10) + { + interval = 10; + } + + this.interval = interval; + } + + if (resend >= 0) + { + fastresend = resend; + } + + this.nocwnd = nc; + + return 0; + } + + public int waitSnd() + { + return this.sndBuf.Count + this.sndQueue.Count; + } + + public void SetNodelay(bool nodelay) + { + this.nodelay = nodelay; + if (nodelay) + { + this.rxMinrto = IKCP_RTO_NDL; + } + else + { + this.rxMinrto = IKCP_RTO_MIN; + } + } + + + public void setAckMaskSize(int ackMaskSize) + { + this.ackMaskSize = ackMaskSize; + this.IKCP_OVERHEAD += (ackMaskSize / 8); + this.mss = mtu - IKCP_OVERHEAD - reserved; + } + + public void setReserved(int reserved) + { + this.reserved = reserved; + this.mss = mtu - IKCP_OVERHEAD - reserved; + } + + + public int Conv + { + get => conv; + set => conv = value; + } + + public int Mtu + { + get => mtu; + set => mtu = value; + } + + public int Mss + { + get => mss; + set => mss = value; + } + + public long SndUna + { + get => sndUna; + set => sndUna = value; + } + + public long SndNxt + { + get => sndNxt; + set => sndNxt = value; + } + + public long RcvNxt + { + get => rcvNxt; + set => rcvNxt = value; + } + + public bool AckNoDelay + { + get => ackNoDelay; + set => ackNoDelay = value; + } + + public object User + { + get => user; + set => user = value; + } + + public int Fastresend + { + get => fastresend; + set => fastresend = value; + } + + public bool Nocwnd + { + get => nocwnd; + set => nocwnd = value; + } + + public bool Stream + { + get => stream; + set => stream = value; + } + + public int RcvWnd + { + get => rcvWnd; + set => rcvWnd = value; + } + + public int SndWnd + { + get => sndWnd; + set => sndWnd = value; + } + + public int RxMinrto + { + get => rxMinrto; + set => rxMinrto = value; + } + + public KcpOutput Output + { + get => output; + set => output = value; + } + + public int Interval + { + get => interval; + set => interval = value; + } + + public bool Nodelay + { + get => nodelay; + set => nodelay = value; + } + + public int DeadLink + { + get => deadLink; + set => deadLink = value; + } + + public IByteBufferAllocator ByteBufAllocator + { + get => byteBufAllocator; + set => byteBufAllocator = value; + } + + public int State + { + get => state; + set => state = value; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/Kcp.cs.meta b/Runtime/csharp-kcp/base-kcp/Kcp.cs.meta new file mode 100644 index 0000000..b103d78 --- /dev/null +++ 
b/Runtime/csharp-kcp/base-kcp/Kcp.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 32635b28aaed380498f8ac95dec086d9 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/KcpOutput.cs b/Runtime/csharp-kcp/base-kcp/KcpOutput.cs new file mode 100644 index 0000000..8df0049 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/KcpOutput.cs @@ -0,0 +1,9 @@ +using DotNetty.Buffers; + +namespace base_kcp +{ + public interface KcpOutput + { + void outPut(IByteBuffer data, Kcp kcp); + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/KcpOutput.cs.meta b/Runtime/csharp-kcp/base-kcp/KcpOutput.cs.meta new file mode 100644 index 0000000..3931d82 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/KcpOutput.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 72f6b06e20a7e0b4fba316766efa8fb8 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/KcpUntils.cs b/Runtime/csharp-kcp/base-kcp/KcpUntils.cs new file mode 100644 index 0000000..a29b461 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/KcpUntils.cs @@ -0,0 +1,13 @@ +using System; +using DotNetty.Common.Utilities; + +namespace base_kcp +{ + public class KcpUntils + { + public static long currentMs() + { + return DateTime.Now.Ticks/TimeSpan.TicksPerMillisecond; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/KcpUntils.cs.meta b/Runtime/csharp-kcp/base-kcp/KcpUntils.cs.meta new file mode 100644 index 0000000..9bedb06 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/KcpUntils.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 272ca1c4ee0516b4db0f07ce0300e2b5 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/LatencySimulator.cs b/Runtime/csharp-kcp/base-kcp/LatencySimulator.cs new file mode 100644 index 0000000..7c62a31 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/LatencySimulator.cs @@ -0,0 +1,390 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using DotNetty.Buffers; +using DotNetty.Common.Utilities; +using base_kcp; + +namespace fec +{ + public class LatencySimulator : KcpOutput + { + private static long long2Uint(long n) + { + return n & 0x00000000FFFFFFFFL; + } + + private long current; + + /** + * 丢包率 + **/ + private int lostrate; + private int rttmin; + private int rttmax; + private LinkedList p12 = new LinkedList(); + private LinkedList p21 = new LinkedList(); + private Random r12 = new Random(); + private Random r21 = new Random(); + + private Random _random = new Random(); + + + // lostrate: 往返一周丢包率的百分比,默认 10% + // rttmin:rtt最小值,默认 60 + // rttmax:rtt最大值,默认 125 + //func (p *LatencySimulator)Init(int lostrate = 10, int rttmin = 60, int rttmax = 125, int nmax = 1000): + public void init(int lostrate, int rttmin, int rttmax) + { + this.current = DateTime.Now.Ticks/10000; + this.lostrate = lostrate / 2; // 上面数据是往返丢包率,单程除以2 + this.rttmin = rttmin / 2; + this.rttmax = rttmax / 2; + } + + + // 发送数据 + // peer - 端点0/1,从0发送,从1接收;从1发送从0接收 + public int send(int peer, IByteBuffer data) + { + int rnd; + if (peer == 0) + { + rnd = r12.Next(100); + } + else + { + rnd = 
r21.Next(100); + } + + //println("!!!!!!!!!!!!!!!!!!!!", rnd, p.lostrate, peer) + if (rnd < lostrate) + { + return 0; + } + + DelayPacket pkt = new DelayPacket(); + pkt.init(data); + current = DateTime.Now.Ticks/10000; + int delay = rttmin; + if (rttmax > rttmin) + { + delay += _random.Next(10000) % (rttmax - rttmin); + } + + pkt.setTs(current + delay); + if (peer == 0) + { + p12.AddLast(pkt); + } + else + { + p21.AddLast(pkt); + } + + return 1; + } + + // 接收数据 + public int recv(int peer, IByteBuffer data) + { + DelayPacket pkt; + if (peer == 0) + { + if (p21.Count == 0) + { + return -1; + } + + pkt = p21.First.Value; + } + else + { + if (p12.Count == 0) + { + return -1; + } + + pkt = p12.First.Value; + } + + current = DateTime.Now.Ticks/10000; + + if (current < pkt.getTs()) + { + return -2; + } + + if (peer == 0) + { + p21.RemoveFirst(); + } + else + { + p12.RemoveFirst(); + } + + int maxsize = pkt.getPtr().ReadableBytes; +// IByteBuffer data1 = data; +// IByteBuffer data2 = pkt.getPtr(); +// Console.WriteLine(data1.AddressOfPinnedMemory().ToString()); +// Console.WriteLine(data2.AddressOfPinnedMemory().ToString()); + + data.WriteBytes(pkt.getPtr()); +// data2.Release(); + pkt.Release(); +// Console.WriteLine(data.ReferenceCount); + return maxsize; + } + + + public static void Main(String[] args) + { +// LatencySimulator latencySimulator = new LatencySimulator(); +// try +// { +//// //latencySimulator.test(0); +//// //latencySimulator.test(1); +// latencySimulator.test(2); +// } +// catch (Exception e) +// { +// Console.WriteLine(e); +// } + +// latencySimulator.BenchmarkFlush(); + } + + + //测试flush性能 + public void BenchmarkFlush() + { + Kcp kcp = new Kcp(1, new LatencySimulator()); + for (int i = 0; i < 1000; i++) + { + Segment segment = Segment.createSegment(null); + kcp.sndBuf.AddLast(segment); + } + + foreach (var seg in kcp.sndBuf) + { + seg.Xmit = 1; + seg.Resendts = kcp.currentMs() + 10000; + } + + //预热 + for (int i = 0; i < 1000000; i++) + { + kcp.flush(false, kcp.currentMs()); + } + + long start = kcp.currentMs(); + for (int i = 0; i < 200000; i++) + { + kcp.flush(false, kcp.currentMs()); + } + + Console.WriteLine((kcp.currentMs() - start) / 200000); + } + + + + class TestOutPut:KcpOutput + { + private LatencySimulator vnet; + private int id; + + public TestOutPut(LatencySimulator vnet,int id) + { + this.vnet = vnet; + this.id = id; + } + + public void outPut(IByteBuffer data, Kcp kcp) + { + vnet.send(id, data); + data.Release(); + } + } + + + + + public void test(int mode) + { + LatencySimulator vnet = new LatencySimulator(); + vnet.init(20, 600, 600); + TestOutPut output1 = new TestOutPut(vnet, 0); + TestOutPut output2 = new TestOutPut(vnet, 1); + + + Kcp kcp1 = new Kcp(0x11223344, output1); + Kcp kcp2 = new Kcp(0x11223344, output2); + //kcp1.setAckMaskSize(8); + //kcp2.setAckMaskSize(8); + + current = long2Uint(kcp1.currentMs()); + long slap = current + 20; + int index = 0; + int next = 0; + long sumrtt = 0; + int count = 0; + int maxrtt = 0; + kcp1.RcvWnd = 512; + kcp1.SndWnd = 512; + kcp2.RcvWnd = 512; + kcp2.SndWnd = 512; + + // 判断测试用例的模式 + if (mode == 0) + { + // 默认模式 + kcp1.initNodelay(false, 10, 0, false); + kcp2.initNodelay(false, 10, 0, false); + } + else if (mode == 1) + { + // 普通模式,关闭流控等 + kcp1.initNodelay(false, 10, 0, true); + kcp2.initNodelay(false, 10, 0, true); + } + else + { + // 启动快速模式 + // 第二个参数 nodelay-启用以后若干常规加速将启动 + // 第三个参数 interval为内部处理时钟,默认设置为 10ms + // 第四个参数 resend为快速重传指标,设置为2 + // 第五个参数 为是否禁用常规流控,这里禁止 + kcp1.initNodelay(true, 10, 2, true); + 
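// (English summary of the parameter comments above) fast mode: nodelay enabled, 10 ms internal update interval, fast retransmit after 2 duplicate ACKs, congestion control disabled +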
kcp2.initNodelay(true, 10, 2, true); + kcp1.RxMinrto = 10; + kcp1.Fastresend = 1; + } + + int hr; + long ts1 = kcp1.currentMs(); + + //写数据 定时更新 + for (;;) + { + current = long2Uint(kcp1.currentMs()); + Thread.Sleep(1); + long now = kcp1.currentMs(); + kcp1.update(now); + kcp2.update(now); + + + //每隔 20ms,kcp1发送数据 + for (; current >= slap; slap += 20) + { + IByteBuffer buf = PooledByteBufferAllocator.Default.Buffer(); + buf.WriteIntLE(index); + index++; + buf.WriteIntLE((int) current); + kcp1.send(buf); + buf.Release(); + } + + //处理虚拟网络:检测是否有udp包从p1->p2 + for (;;) + { + IByteBuffer buffer = PooledByteBufferAllocator.Default.DirectBuffer(2000); +// Console.WriteLine("buffer:" +buffer.AddressOfPinnedMemory().ToString()); + try + { + hr = vnet.recv(1, buffer); + if (hr < 0) + { + break; + } + + kcp2.input(buffer, true, kcp1.currentMs()); + } + finally + { + buffer.Release(); + } + } + + // 处理虚拟网络:检测是否有udp包从p2->p1 + for (;;) + { + IByteBuffer buffer = PooledByteBufferAllocator.Default.Buffer(2000); + try + { + hr = vnet.recv(0, buffer); + if (hr < 0) + { + break; + } + + // 如果 p1收到udp,则作为下层协议输入到kcp1 + kcp1.input(buffer, true, kcp1.currentMs()); + } + finally + { + buffer.Release(); + } + } + + // kcp2接收到任何包都返回回去 + List bufList = new List(); + kcp2.recv(bufList); + foreach (var byteBuf in bufList) + { + kcp2.send(byteBuf); + byteBuf.Release(); + } + + // kcp1收到kcp2的回射数据 + bufList = new List(); + kcp1.recv(bufList); + foreach (var byteBuf in bufList){ + long sn = byteBuf.ReadIntLE(); + long ts = byteBuf.ReadUnsignedIntLE(); + long rtt = 0; + rtt = current - ts; + Console.WriteLine("rtt :" + rtt); + + + if (sn != next) + { + // 如果收到的包不连续 + //for i:=0;i<8 ;i++ { + //println("---", i, buffer[i]) + //} + Console.WriteLine("ERROR sn " + count + "<->" + next + sn); + return; + } + + next++; + sumrtt += rtt; + count++; + if (rtt > maxrtt) + { + maxrtt = (int) rtt; + } + + byteBuf.Release(); + } + if (next > 1000) + { + break; + } + } + + ts1 = kcp1.currentMs() - ts1; + String[] names = new String[] {"default", "normal", "fast"}; + Console.WriteLine(names[mode]+" mode result :"+ts1+" \n"); + Console.WriteLine("avgrtt="+(sumrtt / count)+" maxrtt="+maxrtt+" \n"); + Console.WriteLine("lost percent: " + (Snmp.snmp.RetransSegs)+"\n"); + Console.WriteLine("snmp: " + (Snmp.snmp.ToString())); + } + + + public void outPut(IByteBuffer data, Kcp kcp) + { + throw new NotImplementedException(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/LatencySimulator.cs.meta b/Runtime/csharp-kcp/base-kcp/LatencySimulator.cs.meta new file mode 100644 index 0000000..4bb1ed1 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/LatencySimulator.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 4346df527420c1d4eb9e9981d0eb44cf +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/Program.cs b/Runtime/csharp-kcp/base-kcp/Program.cs new file mode 100644 index 0000000..ea80260 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/Program.cs @@ -0,0 +1,25 @@ +using System; +using System.Runtime.CompilerServices; +using System.Threading; +using DotNetty.Buffers; + +namespace base_kcp +{ + class Program + { + + +// static void Main(string[] args) +// { +// Kcp kcp = new Kcp(1,null); +// Console.WriteLine(kcp.currentMs()); +// Thread.Sleep(5000); +// Console.WriteLine((uint)kcp.currentMs()); +//// var byteBuffer = 
UnpooledByteBufferAllocator.Default.DirectBuffer(10); +//// byteBuffer.WriteInt(55); +//// Console.WriteLine(byteBuffer.ReadInt()); +// +// +// } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/Program.cs.meta b/Runtime/csharp-kcp/base-kcp/Program.cs.meta new file mode 100644 index 0000000..80d2a31 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/Program.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 1d339c06d307ee64dbcdc8902fb4b350 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/Segment.cs b/Runtime/csharp-kcp/base-kcp/Segment.cs new file mode 100644 index 0000000..eee6e84 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/Segment.cs @@ -0,0 +1,177 @@ +using DotNetty.Buffers; +using DotNetty.Common; + +namespace base_kcp +{ + public class Segment + { + private readonly ThreadLocalPool.Handle recyclerHandle; + + /**会话id**/ + + /**命令**/ + private byte cmd; + + /**message中的segment分片ID(在message中的索引,由大到小,0表示最后一个分片)**/ + private short frg; + + /**剩余接收窗口大小(接收窗口大小-接收队列大小)**/ + private int wnd; + + /**message发送时刻的时间戳**/ + private long ts; + + /**message分片segment的序号**/ + private long sn; + + /**待接收消息序号(接收滑动窗口左端)**/ + private long una; + + /**下次超时重传的时间戳**/ + private long resendts; + + /**该分片的超时重传等待时间**/ + private int rto; + + /**收到ack时计算的该分片被跳过的累计次数,即该分片后的包都被对方收到了,达到一定次数,重传当前分片**/ + private int fastack; + + /***发送分片的次数,每发送一次加一**/ + private int xmit; + + private long ackMask; + + private IByteBuffer data; + + private int ackMaskSize; + + private static readonly ThreadLocalPool RECYCLER = + new ThreadLocalPool(handle => + { + return new Segment(handle); + }); + + private Segment(ThreadLocalPool.Handle recyclerHandle) + { + this.recyclerHandle =recyclerHandle; + } + + public void recycle(bool releaseBuf) { + Conv = 0; + cmd = 0; + frg = 0; + wnd = 0; + ts = 0; + sn = 0; + una = 0; + resendts = 0; + rto = 0; + fastack = 0; + xmit = 0; + ackMask=0; + if (releaseBuf) { + data.Release(); + } + data = null; + recyclerHandle.Release(this); + } + + public static Segment createSegment(IByteBufferAllocator byteBufAllocator, int size) { + Segment seg = RECYCLER.Take(); + if (size == 0) { + seg.data = byteBufAllocator.DirectBuffer(0, 0); + } else { + seg.data = byteBufAllocator.DirectBuffer(size); + } + return seg; + } + + + public static Segment createSegment(IByteBuffer buf) { + Segment seg = RECYCLER.Take(); + seg.data = buf; + return seg; + } + + + public int Conv { get; set; } + + public byte Cmd + { + get => cmd; + set => cmd = value; + } + + public short Frg + { + get => frg; + set => frg = value; + } + + public int Wnd + { + get => wnd; + set => wnd = value; + } + + public long Ts + { + get => ts; + set => ts = value; + } + + public long Sn + { + get => sn; + set => sn = value; + } + + public long Una + { + get => una; + set => una = value; + } + + public long Resendts + { + get => resendts; + set => resendts = value; + } + + public int Rto + { + get => rto; + set => rto = value; + } + + public int Fastack + { + get => fastack; + set => fastack = value; + } + + public int Xmit + { + get => xmit; + set => xmit = value; + } + + public long AckMask + { + get => ackMask; + set => ackMask = value; + } + + public IByteBuffer Data + { + get => data; + set => data = value; + } + + public int AckMaskSize + { + get => ackMaskSize; + set => ackMaskSize = value; + } + } +} \ No newline at end of file diff --git 
a/Runtime/csharp-kcp/base-kcp/Segment.cs.meta b/Runtime/csharp-kcp/base-kcp/Segment.cs.meta new file mode 100644 index 0000000..1bc328b --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/Segment.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 3f4d18b06d9a87a4ea2cd5584cf6ebd7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec.meta b/Runtime/csharp-kcp/base-kcp/fec.meta new file mode 100644 index 0000000..09ea02e --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: db279a093e32eae46ab5d1c61bed88ab +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoop.cs b/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoop.cs new file mode 100644 index 0000000..ae9b9b0 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoop.cs @@ -0,0 +1,68 @@ +using DotNetty.Buffers; + +namespace fec +{ + public interface ByteBufCodingLoop + { + /** + * Multiplies a subset of rows from a coding matrix by a full set of + * input shards to produce some output shards. + * + * @param matrixRows The rows from the matrix to use. + * @param inputs An array of byte arrays, each of which is one input shard. + * The inputs array may have extra buffers after the ones + * that are used. They will be ignored. The number of + * inputs used is determined by the length of the + * each matrix row. + * @param inputCount The number of input byte arrays. + * @param outputs Byte arrays where the computed shards are stored. The + * outputs array may also have extra, unused, elements + * at the end. The number of outputs computed, and the + * number of matrix rows used, is determined by + * outputCount. + * @param outputCount The number of outputs to compute. + * @param offset The index in the inputs and output of the first byte + * to process. + * @param byteCount The number of bytes to process. + */ + void codeSomeShards(byte[][] matrixRows, + IByteBuffer[] inputs, + int inputCount, + IByteBuffer[] outputs, + int outputCount, + int offset, + int byteCount); + + /** + * Multiplies a subset of rows from a coding matrix by a full set of + * input shards to produce some output shards, and checks that the + * the data is those shards matches what's expected. + * + * @param matrixRows The rows from the matrix to use. + * @param inputs An array of byte arrays, each of which is one input shard. + * The inputs array may have extra buffers after the ones + * that are used. They will be ignored. The number of + * inputs used is determined by the length of the + * each matrix row. + * @param inputCount THe number of input byte arrays. + * @param toCheck Byte arrays where the computed shards are stored. The + * outputs array may also have extra, unused, elements + * at the end. The number of outputs computed, and the + * number of matrix rows used, is determined by + * outputCount. + * @param checkCount The number of outputs to compute. + * @param offset The index in the inputs and output of the first byte + * to process. + * @param byteCount The number of bytes to process. + * @param tempBuffer A place to store temporary results. May be null. 
+ */ + bool checkSomeShards(byte[][] matrixRows, + IByteBuffer[] inputs, + int inputCount, + byte[][] toCheck, + int checkCount, + int offset, + int byteCount, + byte[] tempBuffer); + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoop.cs.meta b/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoop.cs.meta new file mode 100644 index 0000000..4a7056b --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoop.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 64e86e8aa56eee14eb05a4300d3192c9 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoopBase.cs b/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoopBase.cs new file mode 100644 index 0000000..a6cf279 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoopBase.cs @@ -0,0 +1,39 @@ +using DotNetty.Buffers; + +namespace fec +{ + public class ByteBufCodingLoopBase : ByteBufCodingLoop + { + public virtual void codeSomeShards(byte[][] matrixRows, IByteBuffer[] inputs, int inputCount, + IByteBuffer[] outputs, int outputCount, + int offset, int byteCount) + { + } + + public virtual bool checkSomeShards(byte[][] matrixRows, IByteBuffer[] inputs, int inputCount, byte[][] toCheck, + int checkCount, + int offset, int byteCount, byte[] tempBuffer) + { + byte[][] table = Galois.MULTIPLICATION_TABLE; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + for (int iOutput = 0; iOutput < checkCount; iOutput++) + { + byte[] matrixRow = matrixRows[iOutput]; + int value = 0; + for (int iInput = 0; iInput < inputCount; iInput++) + { + value ^= table[matrixRow[iInput] & 0xFF][inputs[iInput].GetByte(iByte) & 0xFF]; + } + + if (toCheck[iOutput][iByte] != (byte) value) + { + return false; + } + } + } + + return true; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoopBase.cs.meta b/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoopBase.cs.meta new file mode 100644 index 0000000..b0f8fcc --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/ByteBufCodingLoopBase.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 263b876cb89a2b049a12a330730390da +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec/Fec.cs b/Runtime/csharp-kcp/base-kcp/fec/Fec.cs new file mode 100644 index 0000000..2a4a524 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/Fec.cs @@ -0,0 +1,11 @@ +namespace fec.fec +{ + public class Fec + { + public const int fecHeaderSize = 6, + fecDataSize = 2, + fecHeaderSizePlus2 = fecHeaderSize + fecDataSize, // plus 2B data size + typeData = 0xf1, + typeParity = 0xf2; + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/fec/Fec.cs.meta b/Runtime/csharp-kcp/base-kcp/fec/Fec.cs.meta new file mode 100644 index 0000000..7f5a242 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/Fec.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: f6f2fdf75b102254984ec98a36d3e4e1 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec/FecDecode.cs b/Runtime/csharp-kcp/base-kcp/fec/FecDecode.cs new file 
mode 100644 index 0000000..35f0aeb --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/FecDecode.cs @@ -0,0 +1,307 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using DotNetty.Buffers; +using fec; + +namespace fec.fec +{ + public class FecDecode + { + // queue size limit + private readonly int rxlimit; + private readonly int dataShards; + + private int parityShards; + + /** dataShards+parityShards **/ + private readonly int shardSize; + + // ordered receive queue + private readonly List rx; + + private readonly IByteBuffer[] decodeCache; + + /**标记是否已经缓存了**/ + private readonly bool[] flagCache; + + private readonly ReedSolomon codec; + + private readonly IByteBuffer zeros; + + + public FecDecode(int rxlimit, ReedSolomon codec, int mtu) + { + this.rxlimit = rxlimit; + this.dataShards = codec.getDataShardCount(); + this.parityShards = codec.getParityShardCount(); + this.shardSize = dataShards + parityShards; + + if (dataShards <= 0 || parityShards <= 0) + { + throw new Exception("dataShards and parityShards can not less than 0"); + } + + if (rxlimit < dataShards + parityShards) + { + throw new Exception(""); + } + + this.codec = codec; + this.decodeCache = new IByteBuffer[this.shardSize]; + this.flagCache = new bool[this.shardSize]; + this.rx = new List(rxlimit); + + zeros = PooledByteBufferAllocator.Default.DirectBuffer(mtu); + zeros.WriteBytes(new byte[mtu]); + } + + + public List decode(FecPacket pkt) + { + if (pkt.Flag == Fec.typeParity) + { + Snmp.snmp.FECParityShards++; + } + else + { + Snmp.snmp.FECDataShards++; + } + + int n = rx.Count - 1; + int insertIdx = 0; + for (int i = n; i >= 0; i--) + { + //去重 + if (pkt.Seqid == rx[i].Seqid) + { + Snmp.snmp.FECRepeatDataShards++; + pkt.release(); + return null; + } + + if (pkt.Seqid > rx[i].Seqid) + { + // insertion + insertIdx = i + 1; + break; + } + } + + //插入 rx中 + if (insertIdx == n + 1) + { + this.rx.Add(pkt); + } + else + { + rx.Insert(insertIdx, pkt); + } + + //所有消息列表中的第一个包 + // shard range for current packet + long shardBegin = pkt.Seqid - pkt.Seqid % shardSize; + long shardEnd = shardBegin + shardSize - 1; + + //rx数组中的第一个包 + // max search range in ordered queue for current shard + int searchBegin = (int) (insertIdx - pkt.Seqid % shardSize); + if (searchBegin < 0) + { + searchBegin = 0; + } + + int searchEnd = searchBegin + shardSize - 1; + if (searchEnd >= rx.Count) + { + searchEnd = rx.Count - 1; + } + + List result = null; + if (searchEnd - searchBegin + 1 >= dataShards) + { + //当前包组的已收到的包数量 + int numshard = 0; + //当前包组中属于数据包的数量 + int numDataShard = 0; + //搜到第一个包在搜索行中的位置 + int first = 0; + //收到的最大包的字节长度 + int maxlen = 0; + + // zero cache + IByteBuffer[] shards = decodeCache; + bool[] shardsflag = flagCache; + for (int i = 0; i < shards.Length; i++) + { + shards[i] = null; + shardsflag[i] = false; + } + + for (int i = searchBegin; i <= searchEnd; i++) + { + FecPacket fecPacket = rx[i]; + long seqid = fecPacket.Seqid; + if (seqid > shardEnd) + break; + if (seqid < shardBegin) + continue; + shards[(int) (seqid % shardSize)] = fecPacket.Data; + shardsflag[(int) (seqid % shardSize)] = true; + numshard++; + if (fecPacket.Flag == Fec.typeData) + { + numDataShard++; + } + + if (numshard == 1) + { + first = i; + } + + if (fecPacket.Data.ReadableBytes> maxlen) + { + maxlen = fecPacket.Data.ReadableBytes; + } + } + + if (numDataShard == dataShards) + { + freeRange(first, numshard, rx); + } + else if (numshard >= dataShards) + { + for (int i = 0; i < shards.Length; i++) + { + IByteBuffer shard = shards[i]; + 
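// recovery path: fill missing shard slots with zero buffers and zero-pad short shards up to maxlen, then run Reed-Solomon decodeMissing over the group +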
//如果数据不存在 用0填充起来 + if (shard == null) + { + shards[i] = zeros.Copy(0, maxlen); + shards[i].SetWriterIndex(maxlen); + continue; + } + + int left = maxlen - shard.ReadableBytes; + if (left > 0) + { + shard.WriteBytes(this.zeros, left); + zeros.ResetReaderIndex(); +// zeros.resetReaderIndex(); + } + } + + codec.decodeMissing(shards, shardsflag, 0, maxlen); + result = new List(this.dataShards); + for (int i = 0; i < shardSize; i++) + { + if (shardsflag[i]) + { + continue; + } + + IByteBuffer byteBufs = shards[i]; + //释放构建的parityShards内存 + if (i >= dataShards) + { + byteBufs.Release(); + continue; + } + + int packageSize = byteBufs.ReadShort(); + if (byteBufs.ReadableBytes < packageSize) + { +//// System.out.println("bytebuf长度: " + byteBufs.writerIndex() + " 读出长度" + packageSize); +// byte[] bytes = new byte[byteBufs.writerIndex()]; +// byteBufs.getBytes(0, bytes); +// for (byte aByte : +// bytes) { +// System.out.print("[" + aByte + "] "); +// } + Snmp.snmp.FECErrs++; + } + else + { + Snmp.snmp.FECRecovered++; + } + + //去除fec头标记的消息体长度2字段 + byteBufs = byteBufs.Slice(Fec.fecDataSize, packageSize); + //int packageSize =byteBufs.readUnsignedShort(); + //byteBufs = byteBufs.slice(0,packageSize); + result.Add(byteBufs); + Snmp.snmp.FECRecovered++; + //int packageSize =byteBufs.getUnsignedShort(0); + ////判断长度 + //if(byteBufs.writerIndex()-Fec.fecHeaderSizePlus2>=packageSize&&packageSize>0) + //{ + // byteBufs = byteBufs.slice(Fec.fecHeaderSizePlus2,packageSize); + // result.add(byteBufs); + // Snmp.snmp.FECRecovered.incrementAndGet(); + //}else{ + // System.out.println("bytebuf长度: "+byteBufs.writerIndex()+" 读出长度"+packageSize); + // byte[] bytes = new byte[byteBufs.writerIndex()]; + // byteBufs.getBytes(0,bytes); + // for (byte aByte : bytes) { + // System.out.print("["+aByte+"] "); + // } + // Snmp.snmp.FECErrs.incrementAndGet(); + //} + } + + freeRange(first, numshard, rx); + } + } + + if (rx.Count> rxlimit) + { + if (rx[0].Flag == Fec.typeData) + { + Snmp.snmp.FECShortShards++; + } + + freeRange(0, 1, rx); + } + + return result; + } + + + public void release(){ + this.parityShards=0; + foreach (var fecPacket in this.rx) + { + fecPacket?.release(); + } + this.zeros.Release(); + } + /** + * 1,回收first后n个bytebuf + * 2,将q的first到first+n之间的数据移除掉 + * 3,将尾部的n个数据的data清空 + * 4,返回开头到尾部n个数组的对象 + * + * @param first + * @param n + * @param q + */ + private static void freeRange(int first,int n,List q){ + for (int i = first; i < first + n; i++) { + q[i].release(); + } + //copy(q[first:], q[first+n:]) + for (int i = first; i < q.Count; i++) { + int index = i+n; + if(index==q.Count) + break; + q[i]=q[index]; + } + //for (int i = 0; i < n; i++) { + // q.get(q.size()-1-i).setData(null); + //} + for (int i = 0; i < n; i++) { + q.RemoveAt(q.Count-1); + } + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/fec/FecDecode.cs.meta b/Runtime/csharp-kcp/base-kcp/fec/FecDecode.cs.meta new file mode 100644 index 0000000..305a834 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/FecDecode.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: fbba90a1521938a47a4108196366b5fa +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec/FecEncode.cs b/Runtime/csharp-kcp/base-kcp/fec/FecEncode.cs new file mode 100644 index 0000000..f23673e --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/FecEncode.cs @@ -0,0 +1,178 @@ +using System; +using 
DotNetty.Buffers; +using fec; + +namespace fec.fec +{ + public class FecEncode + { + /**消息包长度**/ + private readonly int dataShards; + + /**冗余包长度**/ + private readonly int parityShards; + + /** dataShards+parityShards **/ + private int shardSize; + + //Protect Against Wrapped Sequence numbers + private readonly long paws; + + // next seqid + private long next; + + //count the number of datashards collected + private int shardCount; + + // record maximum data length in datashard + private int maxSize; + + // FEC header offset + private readonly int headerOffset; + + // FEC payload offset + private readonly int payloadOffset; + + //用完需要手动release + private readonly IByteBuffer[] shardCache; + private readonly IByteBuffer[] encodeCache; + + private readonly IByteBuffer zeros; + + private readonly ReedSolomon codec; + + public FecEncode(int headerOffset, ReedSolomon codec, int mtu) + { + this.dataShards = codec.getDataShardCount(); + this.parityShards = codec.getParityShardCount(); + this.shardSize = this.dataShards + this.parityShards; + //this.paws = (Integer.MAX_VALUE/shardSize - 1) * shardSize; + this.paws = 0xffffffffL / shardSize * shardSize; + this.headerOffset = headerOffset; + this.payloadOffset = headerOffset + Fec.fecHeaderSize; + this.codec = codec; + this.shardCache = new IByteBuffer[shardSize]; + this.encodeCache = new IByteBuffer[parityShards]; + zeros = PooledByteBufferAllocator.Default.DirectBuffer(mtu); + zeros.WriteBytes(new byte[mtu]); + } + + /** + * + * 使用方法: + * 1,入bytebuf后 把bytebuf发送出去,并释放bytebuf + * 2,判断返回值是否为null,如果不为null发送出去并释放它 + * + * headerOffset +6字节fectHead + 2字节bodylenth(lenth-headerOffset-6) + * + * 1,对数据写入头标记为数据类型 markData + * 2,写入消息长度 + * 3,获得缓存数据中最大长度,其他的缓存进行扩容到同样长度 + * 4,去掉头长度,进行fec编码 + * 5,对冗余字节数组进行标记为fec makefec + * 6,返回完整长度 + * + * 注意: 传入的bytebuf如果需要释放在传入后手动释放。 + * 返回的bytebuf 也需要自己释放 + * @param byteBuf + * @return + */ + public IByteBuffer[] encode(IByteBuffer byteBuf) + { + markData(byteBuf, headerOffset); + int sz = byteBuf.WriterIndex; + byteBuf.SetShort(payloadOffset, sz - headerOffset - Fec.fecHeaderSizePlus2); + this.shardCache[shardCount] = byteBuf.RetainedDuplicate(); + this.shardCount++; + if (sz > this.maxSize) + { + this.maxSize = sz; + } + + if (shardCount != dataShards) + { + return null; + } + + //填充parityShards + for (int i = 0; i < parityShards; i++) + { + IByteBuffer parityByte = PooledByteBufferAllocator.Default.DirectBuffer(this.maxSize); + shardCache[i + dataShards] = parityByte; + encodeCache[i] = parityByte; + markParity(parityByte, headerOffset); + parityByte.SetWriterIndex(this.maxSize); + } + + //按着最大长度不足补充0 + for (var i = 0; i < this.dataShards; i++) + { + var shard = shardCache[i]; + var left = this.maxSize - shard.WriterIndex; + if (left <= 0) + continue; + //是否需要扩容 会出现吗?? 
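+ // (translated from the comment above) does this shard buffer ever need to grow here? unclear whether that case can actually occur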
+ //if(shard.capacity()=this.paws) { + // this.next++; + // //this.next = (this.next + 1) % this.paws; + //} + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/fec/FecEncode.cs.meta b/Runtime/csharp-kcp/base-kcp/fec/FecEncode.cs.meta new file mode 100644 index 0000000..dd0af99 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/FecEncode.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 0986045e6f298d046958e875faa6fc67 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec/FecExpansion.cs b/Runtime/csharp-kcp/base-kcp/fec/FecExpansion.cs new file mode 100644 index 0000000..4cfec99 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/FecExpansion.cs @@ -0,0 +1,188 @@ +using System; +using DotNetty.Buffers; + +namespace fec +{ + public static class FecExpansion + { + private static readonly InputOutputByteBufTableCodingLoop LOOP = new InputOutputByteBufTableCodingLoop(); + + + public static void encodeParity(this ReedSolomon reedSolomon, IByteBuffer[] shards, int offset, int byteCount) + { + // Check arguments. + reedSolomon.checkBuffersAndSizes(shards, offset, byteCount); + + // Build the array of output buffers. + IByteBuffer[] outputs = new IByteBuffer [reedSolomon.ParityShardCount]; + Array.Copy(shards, reedSolomon.DataShardCount, outputs, 0, reedSolomon.ParityShardCount); +// System.arraycopy(shards, dataShardCount, outputs, 0, parityShardCount); + + // Do the coding. + LOOP.codeSomeShards( + reedSolomon.ParityRows, + shards, reedSolomon.DataShardCount, + outputs, reedSolomon.ParityShardCount, + offset, byteCount); + } + + + /** + * Checks the consistency of arguments passed to public methods. + */ + private static void checkBuffersAndSizes(this ReedSolomon reedSolomon, IByteBuffer[] shards, int offset, + int byteCount) + { + // The number of buffers should be equal to the number of + // data shards plus the number of parity shards. + if (shards.Length != reedSolomon.TotalShardCount) + { + throw new Exception("wrong number of shards: " + shards.Length); + } + + // All of the shard buffers should be the same length. + int shardLength = shards[0].ReadableBytes; + for (int i = 1; i < shards.Length; i++) + { + if (shards[i].ReadableBytes != shardLength) + { + throw new Exception("Shards are different sizes"); + } + } + + // The offset and byteCount must be non-negative and fit in the buffers. + if (offset < 0) + { + throw new Exception("offset is negative: " + offset); + } + + if (byteCount < 0) + { + throw new Exception("byteCount is negative: " + byteCount); + } + + if (shardLength < offset + byteCount) + { + throw new Exception("buffers to small: " + byteCount + offset); + } + } + + + public static void decodeMissing(this ReedSolomon reedSolomon, IByteBuffer[] shards, + bool[] shardPresent, + int offset, + int byteCount) + { + // Check arguments. + reedSolomon.checkBuffersAndSizes(shards, offset, byteCount); + + // Quick check: are all of the shards present? If so, there's + // nothing to do. + int numberPresent = 0; + for (int i = 0; i < reedSolomon.TotalShardCount; i++) + { + if (shardPresent[i]) + { + numberPresent += 1; + } + } + + if (numberPresent == reedSolomon.TotalShardCount) + { + // Cool. All of the shards data data. We don't + // need to do anything. 
+ return; + } + + // More complete sanity check + if (numberPresent < reedSolomon.DataShardCount) + { + throw new Exception("Not enough shards present"); + } + + // Pull out the rows of the matrix that correspond to the + // shards that we have and build a square matrix. This + // matrix could be used to generate the shards that we have + // from the original data. + // + // Also, pull out an array holding just the shards that + // correspond to the rows of the submatrix. These shards + // will be the input to the decoding process that re-creates + // the missing data shards. + Matrix subMatrix = new Matrix(reedSolomon.DataShardCount, reedSolomon.DataShardCount); + IByteBuffer[] subShards = new IByteBuffer[reedSolomon.DataShardCount]; + { + int subMatrixRow = 0; + for (int matrixRow = 0; + matrixRow < reedSolomon.TotalShardCount && subMatrixRow < reedSolomon.DataShardCount; + matrixRow++) + { + if (shardPresent[matrixRow]) + { + for (int c = 0; c < reedSolomon.DataShardCount; c++) + { + subMatrix.set(subMatrixRow, c, reedSolomon.Matrix.get(matrixRow, c)); + } + + subShards[subMatrixRow] = shards[matrixRow]; + subMatrixRow += 1; + } + } + } + + // Invert the matrix, so we can go from the encoded shards + // back to the original data. Then pull out the row that + // generates the shard that we want to decode. Note that + // since this matrix maps back to the orginal data, it can + // be used to create a data shard, but not a parity shard. + Matrix dataDecodeMatrix = subMatrix.invert(); + + // Re-create any data shards that were missing. + // + // The input to the coding is all of the shards we actually + // have, and the output is the missing data shards. The computation + // is done using the special decode matrix we just built. + IByteBuffer[] outputs = new IByteBuffer[reedSolomon.ParityShardCount]; + byte[][] matrixRows = new byte [reedSolomon.ParityShardCount][]; + int outputCount = 0; + for (int iShard = 0; iShard < reedSolomon.DataShardCount; iShard++) + { + if (!shardPresent[iShard]) + { + outputs[outputCount] = shards[iShard]; + matrixRows[outputCount] = dataDecodeMatrix.getRow(iShard); + outputCount += 1; + } + } + + LOOP.codeSomeShards( + matrixRows, + subShards, reedSolomon.DataShardCount, + outputs, outputCount, + offset, byteCount); + + // Now that we have all of the data shards intact, we can + // compute any of the parity that is missing. + // + // The input to the coding is ALL of the data shards, including + // any that we just calculated. The output is whichever of the + // data shards were missing. 
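+ // (note) the loop below regenerates the missing parity shards (indices DataShardCount..TotalShardCount-1), using the now-complete set of data shards as the coding input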
+ outputCount = 0; + for (int iShard = reedSolomon.DataShardCount; iShard < reedSolomon.TotalShardCount; iShard++) + { + if (!shardPresent[iShard]) + { + outputs[outputCount] = shards[iShard]; + matrixRows[outputCount] = reedSolomon.ParityRows[iShard - reedSolomon.DataShardCount]; + outputCount += 1; + } + } + + LOOP.codeSomeShards( + matrixRows, + shards, reedSolomon.DataShardCount, + outputs, outputCount, + offset, byteCount); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/fec/FecExpansion.cs.meta b/Runtime/csharp-kcp/base-kcp/fec/FecExpansion.cs.meta new file mode 100644 index 0000000..9af388e --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/FecExpansion.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b7f1d362e6ae0a74fb33d0aac51d8286 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec/FecPacket.cs b/Runtime/csharp-kcp/base-kcp/fec/FecPacket.cs new file mode 100644 index 0000000..4f25d65 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/FecPacket.cs @@ -0,0 +1,59 @@ +using DotNetty.Buffers; +using DotNetty.Common; + +namespace fec.fec +{ + public class FecPacket + { + private long seqid; + private int flag; + private IByteBuffer data; + + private readonly ThreadLocalPool.Handle recyclerHandle; + + + private static readonly ThreadLocalPool fecPacketRecycler = + new ThreadLocalPool(handle => new FecPacket(handle)); + + private FecPacket(ThreadLocalPool.Handle recyclerHandle) + { + this.recyclerHandle = recyclerHandle; + } + + + public static FecPacket newFecPacket(IByteBuffer byteBuf) + { + FecPacket pkt = fecPacketRecycler.Take(); + pkt.seqid = byteBuf.ReadUnsignedIntLE(); + pkt.flag = byteBuf.ReadUnsignedShortLE(); + pkt.data = byteBuf.RetainedSlice(byteBuf.ReaderIndex, byteBuf.Capacity - byteBuf.ReaderIndex); + pkt.data.SetWriterIndex(byteBuf.ReadableBytes); + return pkt; + } + + + public void release() + { + this.seqid = 0; + this.flag = 0; + this.data.Release(); + this.data = null; + recyclerHandle.Release(this); + } + + public long Seqid + { + get => seqid; + } + + public int Flag + { + get => flag; + } + + public IByteBuffer Data + { + get => data; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/fec/FecPacket.cs.meta b/Runtime/csharp-kcp/base-kcp/fec/FecPacket.cs.meta new file mode 100644 index 0000000..d0c6bcc --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/FecPacket.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 23c7873815091414e98b4a6505299921 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec/InputOutputByteBufTableCodingLoop.cs b/Runtime/csharp-kcp/base-kcp/fec/InputOutputByteBufTableCodingLoop.cs new file mode 100644 index 0000000..4b4085a --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/InputOutputByteBufTableCodingLoop.cs @@ -0,0 +1,101 @@ +using DotNetty.Buffers; + +namespace fec +{ + public class InputOutputByteBufTableCodingLoop : ByteBufCodingLoopBase + { + public void codeSomeShards(byte[][] matrixRows, IByteBuffer[] inputs, int inputCount, IByteBuffer[] outputs, + int outputCount, int offset, int byteCount) + { + byte[][] table = Galois.MULTIPLICATION_TABLE; + + { + int iInput = 0; + IByteBuffer inputShard = inputs[iInput]; + for (int 
iOutput = 0; iOutput < outputCount; iOutput++) + { + IByteBuffer outputShard = outputs[iOutput]; + byte[] matrixRow = matrixRows[iOutput]; + byte[] multTableRow = table[matrixRow[iInput] & 0xFF]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + outputShard.SetByte(iByte, multTableRow[inputShard.GetByte(iByte) & 0xFF]); + //outputShard[iByte] = multTableRow[inputShard[iByte] & 0xFF]; + } + } + } + + for (int iInput = 1; iInput < inputCount; iInput++) + { + IByteBuffer inputShard = inputs[iInput]; + for (int iOutput = 0; iOutput < outputCount; iOutput++) + { + IByteBuffer outputShard = outputs[iOutput]; + byte[] matrixRow = matrixRows[iOutput]; + byte[] multTableRow = table[matrixRow[iInput] & 0xFF]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + byte temp = outputShard.GetByte(iByte); + temp ^= multTableRow[inputShard.GetByte(iByte) & 0xFF]; + outputShard.SetByte(iByte, temp); + //outputShard[iByte] ^= multTableRow[inputShard[iByte] & 0xFF]; + } + } + } + } + + + public bool checkSomeShards( + byte[][] matrixRows, + IByteBuffer[] inputs, int inputCount, + byte[][] toCheck, int checkCount, + int offset, int byteCount, + byte[] tempBuffer) + { + if (tempBuffer == null) + { + return base.checkSomeShards(matrixRows, inputs, inputCount, toCheck, checkCount, offset, byteCount, + null); + } + + // This is actually the code from OutputInputByteTableCodingLoop. + // Using the loops from this class would require multiple temp + // buffers. + + byte[][] table = Galois.MULTIPLICATION_TABLE; + for (int iOutput = 0; iOutput < checkCount; iOutput++) + { + byte[] outputShard = toCheck[iOutput]; + byte[] matrixRow = matrixRows[iOutput]; + { + int iInput = 0; + IByteBuffer inputShard = inputs[iInput]; + byte[] multTableRow = table[matrixRow[iInput] & 0xFF]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + tempBuffer[iByte] = multTableRow[inputShard.GetByte(iByte) & 0xFF]; + } + } + for (int iInput = 1; iInput < inputCount; iInput++) + { + IByteBuffer inputShard = inputs[iInput]; + byte[] multTableRow = table[matrixRow[iInput] & 0xFF]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + tempBuffer[iByte] ^= multTableRow[inputShard.GetByte(iByte) & 0xFF]; + } + } + + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + if (tempBuffer[iByte] != outputShard[iByte]) + { + return false; + } + } + } + + return true; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/fec/InputOutputByteBufTableCodingLoop.cs.meta b/Runtime/csharp-kcp/base-kcp/fec/InputOutputByteBufTableCodingLoop.cs.meta new file mode 100644 index 0000000..74688ae --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/InputOutputByteBufTableCodingLoop.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: ab49272815d14f54f9e86868440eac20 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/base-kcp/fec/Snmp.cs b/Runtime/csharp-kcp/base-kcp/fec/Snmp.cs new file mode 100644 index 0000000..77b5fdf --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/Snmp.cs @@ -0,0 +1,116 @@ +namespace fec +{ + public class Snmp + { + // bytes sent from upper level + public int BytesSent; + + // bytes received to upper level + public int BytesReceived; + + // max number of connections ever reached + public int MaxConn; + + // accumulated active open connections + public int ActiveOpens; + + // 
accumulated passive open connections + public int PassiveOpens; + + // current number of established connections + public int CurrEstab; + + // UDP read errors reported from net.PacketConn + public int InErrs; + + // checksum errors from CRC32 + public int InCsumErrors; + + // packet iput errors reported from KCP + public int KCPInErrors; + + // incoming packets count + public int InPkts; + + // outgoing packets count + public int OutPkts; + + // incoming KCP segments + public int InSegs; + + // outgoing KCP segments + public int OutSegs; + + // UDP bytes received + public int InBytes; + + // UDP bytes sent + public int OutBytes; + + // accmulated retransmited segments + public int RetransSegs; + + // accmulated fast retransmitted segments + public int FastRetransSegs; + + // accmulated early retransmitted segments + public int EarlyRetransSegs; + + // number of segs infered as lost + public int LostSegs; + + // number of segs duplicated + public int RepeatSegs; + + // correct packets recovered from FEC + public int FECRecovered; + + // incorrect packets recovered from FEC + public int FECErrs; + + // 收到的 Data数量 + public int FECDataShards; + + // 收到的 Parity数量 + public int FECParityShards; + + // number of data shards that's not enough for recovery + public int FECShortShards; + + // number of data shards that's not enough for recovery + public int FECRepeatDataShards; + + public static volatile Snmp snmp = new Snmp(); + + public override string ToString() { + return "Snmp{" + + "BytesSent=" + BytesSent + + ", BytesReceived=" + BytesReceived + + ", MaxConn=" + MaxConn + + ", ActiveOpens=" + ActiveOpens + + ", PassiveOpens=" + PassiveOpens + + ", CurrEstab=" + CurrEstab + + ", InErrs=" + InErrs + + ", InCsumErrors=" + InCsumErrors + + ", KCPInErrors=" + KCPInErrors + + ", 收到包=" + InPkts + + ", 发送包=" + OutPkts + + ", InSegs=" + InSegs + + ", OutSegs=" + OutSegs + + ", 收到字节=" + InBytes + + ", 发送字节=" + OutBytes + + ", 总共重发数=" + RetransSegs + + ", 快速重发数=" + FastRetransSegs + + ", 空闲快速重发数=" + EarlyRetransSegs + + ", 超时重发数=" + LostSegs + + ", 收到重复包数量=" + RepeatSegs + + ", fec恢复数=" + FECRecovered + + ", fec恢复错误数=" + FECErrs + + ", 收到fecData数=" + FECDataShards + + ", 收到fecParity数=" + FECParityShards + + ", fec缓存冗余淘汰data包数=" + FECShortShards + + ", fec收到重复的数据包=" + FECRepeatDataShards + + '}'; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/base-kcp/fec/Snmp.cs.meta b/Runtime/csharp-kcp/base-kcp/fec/Snmp.cs.meta new file mode 100644 index 0000000..3796f23 --- /dev/null +++ b/Runtime/csharp-kcp/base-kcp/fec/Snmp.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 257fc0b23d23d324abe13909869a853a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp.meta b/Runtime/csharp-kcp/dotNetty-kcp.meta new file mode 100644 index 0000000..c95c913 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: a2ef5bb259ffcb54abc3b28267641762 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ChannelConfig.cs b/Runtime/csharp-kcp/dotNetty-kcp/ChannelConfig.cs new file mode 100644 index 0000000..0bddc1d --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ChannelConfig.cs @@ -0,0 +1,171 @@ +using base_kcp; +using fec.fec; + +namespace dotNetty_kcp +{ + public class ChannelConfig + { 
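+ // Per-connection KCP tuning options: nodelay/interval/fast-resend/nocwnd, window sizes, MTU, FEC shard counts, CRC32 check, ack mask size and timeout; defaults mirror the Kcp constants.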
+ private bool nodelay; + private int interval = Kcp.IKCP_INTERVAL; + private int fastresend; + private bool nocwnd; + private int sndwnd = Kcp.IKCP_WND_SND; + private int rcvwnd = Kcp.IKCP_WND_RCV; + private int mtu = Kcp.IKCP_MTU_DEF; + private int minRto = Kcp.IKCP_RTO_MIN; + //超时时间 超过一段时间没收到消息断开连接 + private long timeoutMillis; + //TODO 可能有bug还未测试 + private bool stream; + + //下面为新增参数 + private int fecDataShardCount; + private int fecParityShardCount; + //收到包立刻回传ack包 + private bool ackNoDelay = false; + //发送包立即调用flush 延迟低一些 cpu增加 如果interval值很小 建议关闭该参数 + private bool fastFlush = true; + //crc32校验 + private bool crc32Check = false; + + //增加ack包回复成功率 填 /8/16/32 + private int ackMaskSize = 0; + /**使用conv确定一个channel 还是使用 socketAddress确定一个channel**/ + private bool useConvChannel=false; + /**预留长度**/ + private int reserved; + + + public void initNodelay(bool nodelay, int interval, int resend, bool nc){ + this.nodelay = nodelay; + this.interval = interval; + this.fastresend = resend; + this.nocwnd=nc; + } + + + public int Conv { get; set; } + + public bool Nodelay + { + get => nodelay; + set => nodelay = value; + } + + public int Interval + { + get => interval; + set => interval = value; + } + + public int Fastresend + { + get => fastresend; + set => fastresend = value; + } + + public bool Nocwnd + { + get => nocwnd; + set => nocwnd = value; + } + + public int Sndwnd + { + get => sndwnd; + set => sndwnd = value; + } + + public int Rcvwnd + { + get => rcvwnd; + set => rcvwnd = value; + } + + public int Mtu + { + get => mtu; + set => mtu = value; + } + + public int MinRto + { + get => minRto; + set => minRto = value; + } + + public long TimeoutMillis + { + get => timeoutMillis; + set => timeoutMillis = value; + } + + public bool Stream + { + get => stream; + set => stream = value; + } + + public int FecDataShardCount + { + get => fecDataShardCount; + set + { + if (value > 0) + { + reserved += Fec.fecHeaderSizePlus2; + } + fecDataShardCount = value; + } + } + + public int FecParityShardCount + { + get => fecParityShardCount; + set => fecParityShardCount = value; + } + + public bool AckNoDelay + { + get => ackNoDelay; + set => ackNoDelay = value; + } + + public bool FastFlush + { + get => fastFlush; + set => fastFlush = value; + } + + public bool Crc32Check + { + get => crc32Check; + set + { + if (value) + { + reserved += Ukcp.HEADER_CRC; + } + crc32Check = value; + } + } + + public int AckMaskSize + { + get => ackMaskSize; + set => ackMaskSize = value; + } + + + public int Reserved + { + get => reserved; + } + + public bool UseConvChannel + { + get => useConvChannel; + set => useConvChannel = value; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ChannelConfig.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ChannelConfig.cs.meta new file mode 100644 index 0000000..e544c1e --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ChannelConfig.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: d88e5ad743feb784f890f9aa90b3c369 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ClientChannelHandler.cs b/Runtime/csharp-kcp/dotNetty-kcp/ClientChannelHandler.cs new file mode 100644 index 0000000..278f6a9 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ClientChannelHandler.cs @@ -0,0 +1,33 @@ +using System; +using DotNetty.Transport.Channels; +using DotNetty.Transport.Channels.Sockets; +using 
dotNetty_kcp.thread; + +namespace dotNetty_kcp +{ + public class ClientChannelHandler:ChannelHandlerAdapter + { + private readonly IChannelManager _channelManager; + + private readonly ChannelConfig _channelConfig; + + + public ClientChannelHandler(IChannelManager channelManager,ChannelConfig channelConfig) + { + this._channelManager = channelManager; + this._channelConfig = channelConfig; + } + + public override void ExceptionCaught(IChannelHandlerContext context, Exception exception) + { + Console.WriteLine(exception); + } + + public override void ChannelRead(IChannelHandlerContext context, object message) + { + var msg = (DatagramPacket) message; + var ukcp = _channelManager.get(msg); + ukcp.read(msg.Content); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ClientChannelHandler.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ClientChannelHandler.cs.meta new file mode 100644 index 0000000..6fd9dea --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ClientChannelHandler.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: a8669f662ea204e4a92123d163414480 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ClientConvChannelManager.cs b/Runtime/csharp-kcp/dotNetty-kcp/ClientConvChannelManager.cs new file mode 100644 index 0000000..522e342 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ClientConvChannelManager.cs @@ -0,0 +1,66 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Net; +using System.Xml.Linq; +using DotNetty.Transport.Channels; +using DotNetty.Transport.Channels.Sockets; + +namespace dotNetty_kcp +{ + /** + * 根据conv确定一个session + */ + public class ClientConvChannelManager : IChannelManager + { + + private readonly ConcurrentDictionary _ukcps = new ConcurrentDictionary(); + + private readonly int convIndex; + + public ClientConvChannelManager(int convIndex) + { + this.convIndex = convIndex; + } + + public Ukcp get(DatagramPacket msg) + { + var conv = getConv(msg); + _ukcps.TryGetValue(conv, out var ukcp); + return ukcp; + } + + private int getConv(DatagramPacket msg) { + var bytebuffer = msg.Content; + return bytebuffer.GetIntLE(convIndex);; + } + + + + public void New(EndPoint endPoint, Ukcp ukcp,DatagramPacket msg) + { + var conv = ukcp.getConv(); + if (msg != null) { + conv = getConv(msg); + ukcp.setConv(conv); + } + _ukcps.TryAdd(conv, ukcp); + } + + public void del(Ukcp ukcp) + { + _ukcps.TryRemove(ukcp.getConv(), out var temp); + if (temp == null) + { + Console.WriteLine("ukcp session is not exist conv: " + ukcp.getConv()); + } + ukcp.user().Channel.CloseAsync(); + } + + public ICollection getAll() + { + return this._ukcps.Values; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ClientConvChannelManager.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ClientConvChannelManager.cs.meta new file mode 100644 index 0000000..ee119ff --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ClientConvChannelManager.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 2c360702063140340b74b72506580816 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ClientEndPointChannelManager.cs 
b/Runtime/csharp-kcp/dotNetty-kcp/ClientEndPointChannelManager.cs new file mode 100644 index 0000000..9829c24 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ClientEndPointChannelManager.cs @@ -0,0 +1,39 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Net; +using DotNetty.Transport.Channels.Sockets; + +namespace dotNetty_kcp +{ + public class ClientEndPointChannelManager : IChannelManager + { + private readonly ConcurrentDictionary _ukcps = new ConcurrentDictionary(); + + public Ukcp get(DatagramPacket msg) + { + _ukcps.TryGetValue(msg.Recipient, out var ukcp); + return ukcp; + } + + public void New(EndPoint endPoint, Ukcp ukcp, DatagramPacket msg) + { + _ukcps[endPoint] = ukcp; + } + + public void del(Ukcp ukcp) + { + _ukcps.TryRemove(ukcp.user().LocalAddress, out var temp); + if (temp == null) + { + Console.WriteLine("ukcp session is not exist RemoteAddress: " + ukcp.user().RemoteAddress); + } + ukcp.user().Channel.CloseAsync(); + } + + public ICollection getAll() + { + return _ukcps.Values; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ClientEndPointChannelManager.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ClientEndPointChannelManager.cs.meta new file mode 100644 index 0000000..e9aff20 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ClientEndPointChannelManager.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 1ff0643546fcb4245b31d92384a0c882 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/CloseTask.cs b/Runtime/csharp-kcp/dotNetty-kcp/CloseTask.cs new file mode 100644 index 0000000..589d251 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/CloseTask.cs @@ -0,0 +1,19 @@ +using dotNetty_kcp.thread; + +namespace dotNetty_kcp +{ + public class CloseTask:ITask + { + private Ukcp _ukcp; + + public CloseTask(Ukcp ukcp) + { + _ukcp = ukcp; + } + + public void execute() + { + _ukcp.internalClose(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/CloseTask.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/CloseTask.cs.meta new file mode 100644 index 0000000..940cc6d --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/CloseTask.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: c9bf6f8e34ef3894d81e39e29b39519a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/CodecOutputList.cs b/Runtime/csharp-kcp/dotNetty-kcp/CodecOutputList.cs new file mode 100644 index 0000000..a923bb8 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/CodecOutputList.cs @@ -0,0 +1,39 @@ +using System.Collections.Generic; +using DotNetty.Common; + +namespace dotNetty_kcp +{ + public class CodecOutputList:List + { + + const int DefaultInitialCapacity =16; + + static readonly ThreadLocalPool> Pool = new ThreadLocalPool>(handle => new CodecOutputList(handle)); + + readonly ThreadLocalPool.Handle returnHandle; + + CodecOutputList(ThreadLocalPool.Handle returnHandle) + { + this.returnHandle = returnHandle; + } + + public static CodecOutputList NewInstance() => NewInstance(DefaultInitialCapacity); + + public static CodecOutputList NewInstance(int minCapacity) + { + CodecOutputList ret = Pool.Take(); + if (ret.Capacity < minCapacity) + { + 
ret.Capacity = minCapacity; + } + return ret; + + } + + public void Return() + { + this.Clear(); + this.returnHandle.Release(this); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/CodecOutputList.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/CodecOutputList.cs.meta new file mode 100644 index 0000000..cfa493e --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/CodecOutputList.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 98728d44f6484fa4c94a76f1060cdca0 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ConnectTask.cs b/Runtime/csharp-kcp/dotNetty-kcp/ConnectTask.cs new file mode 100644 index 0000000..9520a62 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ConnectTask.cs @@ -0,0 +1,31 @@ +using System; +using dotNetty_kcp.thread; + +namespace dotNetty_kcp +{ + public class ConnectTask:ITask + { + private readonly Ukcp _ukcp; + + private readonly KcpListener _listener; + + public ConnectTask(Ukcp ukcp, KcpListener listener) + { + _ukcp = ukcp; + _listener = listener; + } + + + public void execute() + { + try + { + _listener.onConnected(_ukcp); + } + catch (Exception e) + { + _listener.handleException(e,_ukcp); + } + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ConnectTask.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ConnectTask.cs.meta new file mode 100644 index 0000000..23d1ace --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ConnectTask.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 61e976fc47fdced4aaf0c8a2394d0293 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/Crc32.cs b/Runtime/csharp-kcp/dotNetty-kcp/Crc32.cs new file mode 100644 index 0000000..ee69895 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/Crc32.cs @@ -0,0 +1,104 @@ +using System; +using System.Text; +using DotNetty.Buffers; + +namespace dotNetty_kcp +{ + /// + /// Computes a CRC32 checksum. + /// + /// Based on + public static class Crc32 + { + readonly static uint[] Table = CreateTable(); + + static Crc32() + { + } + +// /// +// /// Compute the checksum of a UTF8 text. +// /// +// /// Text to calculate +// /// Checksum +// public static int ComputeChecksum(string text) +// { +// return ComputeChecksum(text, Encoding.UTF8); +// } +// +// /// +// /// Compute the checksum of a text using a specific encoding. +// /// +// /// Text to calculate +// /// Text encoding +// /// Checksum +// public static int ComputeChecksum(string text, Encoding encoding) +// { +// if (string.IsNullOrEmpty(text)) return 0; +// byte[] bytes = encoding.GetBytes(text); +// return ComputeChecksum(bytes); +// } + + /// + /// Compute the checksum of a binary buffer. + /// + /// Buffer to calculate + /// + public static int ComputeChecksum(sbyte[] bytes) + { + uint crc = 0xffffffff; + for (int i = 0; i < bytes.Length; i++) + { + byte index = (byte) (((crc) & 0xff) ^ bytes[i]); + crc = (crc >> 8) ^ Table[index]; + } + + return unchecked((int) ~crc); + } + + + + /// + /// Compute the checksum of a binary buffer. 
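/// A hedged usage sketch using only names defined in this patch: the receive path can recompute
/// uint crc = Crc32.ComputeChecksum(buf, buf.ReaderIndex, buf.ReadableBytes) and compare it with the
/// little-endian value that Crc32OutPut (below) wrote at the header offset; the table is built from the
/// standard reflected polynomial 0xEDB88320, so results match other CRC-32 implementations.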
+ /// + /// Buffer to calculate + /// + public static uint ComputeChecksum(IByteBuffer byteBuffer,int offset,int lenth) + { + var crc = 0xffffffff; + lenth += offset; + for (var i = offset; i < lenth; i++) + { + var index = (byte) (((crc) & 0xff) ^ byteBuffer.GetByte(i)); + crc = (crc >> 8) ^ Table[index]; + } + return ~crc; + } + + static uint[] CreateTable() + { + const uint poly = 0xedb88320; + var table = new uint[256]; + uint temp = 0; + for (uint i = 0; i < table.Length; ++i) + { + temp = i; + for (int j = 8; j > 0; --j) + { + if ((temp & 1) == 1) + { + temp = (uint) ((temp >> 1) ^ poly); + } + else + { + temp >>= 1; + } + } + + table[i] = temp; + } + + return table; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/Crc32.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/Crc32.cs.meta new file mode 100644 index 0000000..20cf717 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/Crc32.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 4756f29bf8d5f204f98198ae62bd5c31 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/Crc32OutPut.cs b/Runtime/csharp-kcp/dotNetty-kcp/Crc32OutPut.cs new file mode 100644 index 0000000..41906f8 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/Crc32OutPut.cs @@ -0,0 +1,25 @@ +using System; +using base_kcp; +using DotNetty.Buffers; + +namespace dotNetty_kcp +{ + public class Crc32OutPut:KcpOutput + { + private readonly KcpOutput _output; + private readonly int _headerOffset; + + public Crc32OutPut(KcpOutput output,int headerOffset) { + _output = output; + _headerOffset = headerOffset; + } + + public void outPut(IByteBuffer data, Kcp kcp) + { + var checksum =Crc32.ComputeChecksum(data, _headerOffset + Ukcp.HEADER_CRC, + data.ReadableBytes - _headerOffset - Ukcp.HEADER_CRC); + data.SetUnsignedIntLE(_headerOffset, checksum); + _output.outPut(data,kcp); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/Crc32OutPut.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/Crc32OutPut.cs.meta new file mode 100644 index 0000000..a201d2b --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/Crc32OutPut.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 16da701c0d0560a499d4ae30ebe6a893 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/FecOutPut.cs b/Runtime/csharp-kcp/dotNetty-kcp/FecOutPut.cs new file mode 100644 index 0000000..19ccd10 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/FecOutPut.cs @@ -0,0 +1,32 @@ +using base_kcp; +using DotNetty.Buffers; +using fec.fec; + +namespace dotNetty_kcp +{ + public class FecOutPut :KcpOutput + { + private readonly KcpOutput output; + + private readonly FecEncode fecEncode; + + public FecOutPut(KcpOutput output, FecEncode fecEncode) + { + this.output = output; + this.fecEncode = fecEncode; + } + + public void outPut(IByteBuffer data, Kcp kcp) + { + var byteBufs = fecEncode.encode(data); + //out之后会自动释放你内存 + output.outPut(data,kcp); + if(byteBufs==null) + return; + foreach (var parityByteBuf in byteBufs) + { + output.outPut(parityByteBuf,kcp); + } + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/FecOutPut.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/FecOutPut.cs.meta new file mode 100644 index 
0000000..cab0b31 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/FecOutPut.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: eb2d547a97b37cc4a9176a4933ed6ef7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/IChannelManager.cs b/Runtime/csharp-kcp/dotNetty-kcp/IChannelManager.cs new file mode 100644 index 0000000..ccf6bda --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/IChannelManager.cs @@ -0,0 +1,19 @@ +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Net; +using DotNetty.Transport.Channels; +using DotNetty.Transport.Channels.Sockets; + +namespace dotNetty_kcp +{ + public interface IChannelManager + { + Ukcp get(DatagramPacket msg); + + void New(EndPoint endPoint, Ukcp ukcp, DatagramPacket msg); + + void del(Ukcp ukcp); + + ICollection getAll(); + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/IChannelManager.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/IChannelManager.cs.meta new file mode 100644 index 0000000..71c8d89 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/IChannelManager.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 3377f6e73b5770a44823bb614f7fe659 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/IScheduleTask.cs b/Runtime/csharp-kcp/dotNetty-kcp/IScheduleTask.cs new file mode 100644 index 0000000..25716a0 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/IScheduleTask.cs @@ -0,0 +1,11 @@ +using dotNetty_kcp.thread; +using DotNetty.Common.Concurrency; +using DotNetty.Common.Utilities; + +namespace dotNetty_kcp +{ + public interface IScheduleTask:ITimerTask,IRunnable + { + + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/IScheduleTask.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/IScheduleTask.cs.meta new file mode 100644 index 0000000..c7d6cf6 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/IScheduleTask.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 96826846741b66c4f8d13201278aac97 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/KcpClient.cs b/Runtime/csharp-kcp/dotNetty-kcp/KcpClient.cs new file mode 100644 index 0000000..dea436e --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/KcpClient.cs @@ -0,0 +1,192 @@ +using System; +using System.Net; +using System.Net.Sockets; +using base_kcp; +using DotNetty.Transport.Bootstrapping; +using DotNetty.Transport.Channels; +using DotNetty.Transport.Channels.Sockets; +using dotNetty_kcp.thread; +using fec; +using fec.fec; + +namespace dotNetty_kcp +{ + /** + * kcp客户端 + * 客户端使用方式: + * 1,与服务器tcp通讯得到conv + * 2,kcp通过conv标识与服务器通讯 + * 3,客户端发现网络断开重连之后必须通过kcp发送一个心跳包出去 用于服务器确定客户端的出口地址 + * 4,客户端需要最少每60秒发送一个心跳数据包服务端收到后回复客户端用于 路由表记录映射信息 + */ + public class KcpClient + { + private Bootstrap bootstrap; + + private IExecutorPool _executorPool; + + private IChannelManager _channelManager; + + private IEventLoopGroup _eventLoopGroup; + + private IScheduleThread _scheduleThread; + + + private static IChannel bindLocal(Bootstrap bootstrap,EndPoint localAddress = null) + { + if (localAddress == null) + { + 
localAddress = new IPEndPoint(IPAddress.Any, 0); + } + + return bootstrap.BindAsync(localAddress).Result; + } + + public System.Threading.Tasks.Task BindLocal() + { + var localAddress = new IPEndPoint(IPAddress.Any, 0); + return bootstrap.BindAsync(localAddress); + } + + public void init(ChannelConfig channelConfig,ExecutorPool executorPool,IEventLoopGroup eventLoopGroup) + { + if(channelConfig.UseConvChannel) + { + var convIndex = 0; + if(channelConfig.Crc32Check) + { + convIndex+=Ukcp.HEADER_CRC; + } + if(channelConfig.FecDataShardCount!=0&&channelConfig.FecParityShardCount!=0) + { + convIndex+= Fec.fecHeaderSizePlus2; + } + _channelManager = new ClientConvChannelManager(convIndex); + } + else + { + _channelManager = new ClientEndPointChannelManager(); + } + + //初始化线程池 创建一个线程就够了 + _executorPool = executorPool; + _executorPool.CreateMessageExecutor(); + _eventLoopGroup = eventLoopGroup; + + _scheduleThread = new EventLoopScheduleThread(); + + bootstrap = new Bootstrap(); + bootstrap.Group(_eventLoopGroup); + bootstrap.ChannelFactory(() => new SocketDatagramChannel(AddressFamily.InterNetwork)); + bootstrap.Handler(new ActionChannelInitializer(channel => + { + var pipeline = channel.Pipeline; + pipeline.AddLast(new ClientChannelHandler(_channelManager,channelConfig)); + })); + } + + + + public void init(ChannelConfig channelConfig) + { + var executorPool = new ExecutorPool(); + executorPool.CreateMessageExecutor(); + init(channelConfig,executorPool,new MultithreadEventLoopGroup()); + } + + + /** + * 重连接口 + * 使用旧的kcp对象,出口ip和端口替换为新的 + * 在4G切换为wifi等场景使用 + * @param ukcp + */ + public void reconnect(Ukcp ukcp){ + if (!(_channelManager is ServerConvChannelManager)) + { + throw new Exception("reconnect can only be used in convChannel"); + } + ukcp.IMessageExecutor.execute(new ReconnectTask(ukcp,bootstrap)); + } + + private class ReconnectTask : ITask + { + private readonly Ukcp _ukcp; + private readonly Bootstrap _bootstrap; + + public ReconnectTask(Ukcp ukcp, Bootstrap bootstrap) + { + _ukcp = ukcp; + _bootstrap = bootstrap; + } + + public void execute() + { + _ukcp.user().Channel.CloseAsync(); + var iChannel = bindLocal(_bootstrap); + _ukcp.user().Channel = iChannel; + } + } + + + public Ukcp connect(IChannel localChannel,EndPoint remoteAddress, ChannelConfig channelConfig, KcpListener kcpListener) + { + + KcpOutput kcpOutput = new KcpOutPutImp(); + ReedSolomon reedSolomon = null; + if (channelConfig.FecDataShardCount != 0 && channelConfig.FecParityShardCount != 0) + { + reedSolomon = ReedSolomon.create(channelConfig.FecDataShardCount, channelConfig.FecParityShardCount); + } + + var _messageExecutor = _executorPool.GetAutoMessageExecutor(); + + var ukcp = new Ukcp(kcpOutput, kcpListener, _messageExecutor, reedSolomon, channelConfig); + + var user = new User(localChannel, remoteAddress, localChannel.LocalAddress); + ukcp.user(user); + + _channelManager.New(localChannel.LocalAddress, ukcp,null); + + _messageExecutor.execute(new ConnectTask(ukcp, kcpListener)); + + var scheduleTask = new ScheduleTask( _channelManager, ukcp,_scheduleThread); + + _scheduleThread.schedule(scheduleTask,TimeSpan.FromMilliseconds(ukcp.getInterval())); + return ukcp; + } + + /** + * 连接一个服务器 + */ + public Ukcp connect(EndPoint localAddress,EndPoint remoteAddress, ChannelConfig channelConfig, KcpListener kcpListener) + { + var channel = bindLocal(bootstrap,localAddress); + return connect(channel, remoteAddress, channelConfig, kcpListener); + } + + /** + * 连接一个服务器 + */ + public Ukcp connect(EndPoint remoteAddress, 
ChannelConfig channelConfig, KcpListener kcpListener) + { + var channel = bindLocal(bootstrap); + return connect(channel, remoteAddress, channelConfig, kcpListener); + } + + + public void stop() + { + foreach (var ukcp in _channelManager.getAll()) + { + ukcp.close(); + } + _executorPool?.stop(false); + if (_eventLoopGroup != null&&!_eventLoopGroup.IsShuttingDown) + { + _eventLoopGroup?.ShutdownGracefullyAsync().Wait(); + } + _scheduleThread.stop(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/KcpClient.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/KcpClient.cs.meta new file mode 100644 index 0000000..23a1700 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/KcpClient.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 7a7de561d591c09489caa207f80bf53f +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/KcpListener.cs b/Runtime/csharp-kcp/dotNetty-kcp/KcpListener.cs new file mode 100644 index 0000000..e5b3f2d --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/KcpListener.cs @@ -0,0 +1,39 @@ +using System; +using DotNetty.Buffers; + +namespace dotNetty_kcp +{ + public interface KcpListener + { + + /** + * 连接之后 + * @param ukcp + */ + void onConnected(Ukcp ukcp); + + /** + * kcp message + * + * @param byteBuf the data + * @param ukcp + */ + void handleReceive(IByteBuffer byteBuf, Ukcp ukcp); + + /** + * + * kcp异常,之后此kcp就会被关闭 + * + * @param ex 异常 + * @param ukcp 发生异常的kcp,null表示非kcp错误 + */ + void handleException(Exception ex, Ukcp ukcp); + + /** + * 关闭 + * + * @param ukcp + */ + void handleClose(Ukcp ukcp); + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/KcpListener.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/KcpListener.cs.meta new file mode 100644 index 0000000..cf6476d --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/KcpListener.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: fd434f0381d2a6e4584fbfa72b420ace +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/KcpOutPutImp.cs b/Runtime/csharp-kcp/dotNetty-kcp/KcpOutPutImp.cs new file mode 100644 index 0000000..4163740 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/KcpOutPutImp.cs @@ -0,0 +1,20 @@ +using System; +using base_kcp; +using DotNetty.Buffers; +using DotNetty.Transport.Channels.Sockets; +using fec; + +namespace dotNetty_kcp +{ + public class KcpOutPutImp:KcpOutput + { + public void outPut(IByteBuffer data, Kcp kcp) + { + Snmp.snmp.OutPkts++; + Snmp.snmp.OutBytes+=data.WriterIndex; + var user = (User) kcp.User; + var temp = new DatagramPacket(data,user.LocalAddress, user.RemoteAddress); + user.Channel.WriteAndFlushAsync(temp); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/KcpOutPutImp.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/KcpOutPutImp.cs.meta new file mode 100644 index 0000000..8783d37 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/KcpOutPutImp.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: fd4828372ce29cf418519b2eb3c3eebc +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/KcpServer.cs 
b/Runtime/csharp-kcp/dotNetty-kcp/KcpServer.cs new file mode 100644 index 0000000..ba3463a --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/KcpServer.cs @@ -0,0 +1,103 @@ +using System; +using System.Collections.Generic; +using System.Net.Sockets; +using System.Numerics; +using System.Threading; +using DotNetty.Transport.Bootstrapping; +using DotNetty.Transport.Channels; +using DotNetty.Transport.Channels.Sockets; +using dotNetty_kcp.thread; +using fec.fec; + +namespace dotNetty_kcp +{ + public class KcpServer + { + + private IExecutorPool _executorPool; + + private Bootstrap _bootstrap; + + private IEventLoopGroup _eventLoopGroup; + + private readonly List _localAddress = new List(); + + private IChannelManager _channelManager; + + private IScheduleThread _scheduleThread; + + + public void init(int workSize, KcpListener kcpListener, ChannelConfig channelConfig, params int[] ports) + { + _executorPool = new ExecutorPool(); + for (int i = 0; i < workSize; i++) + { + _executorPool.CreateMessageExecutor(); + } + init(_executorPool, kcpListener, channelConfig, ports); + } + + + public void init(IExecutorPool executorPool, KcpListener kcpListener, ChannelConfig channelConfig, params int[] ports) { + if(channelConfig.UseConvChannel){ + int convIndex = 0; + if(channelConfig.Crc32Check){ + convIndex+=Ukcp.HEADER_CRC; + } + if(channelConfig.FecDataShardCount!=0&&channelConfig.FecParityShardCount!=0){ + convIndex+= Fec.fecHeaderSizePlus2; + } + _channelManager = new ServerConvChannelManager(convIndex); + }else{ + _channelManager = new ServerEndPointChannelManager(); + } + + int cpuNum = Environment.ProcessorCount; + int bindTimes = cpuNum; + + _eventLoopGroup = new MultithreadEventLoopGroup(cpuNum); + _scheduleThread = new HashedWheelScheduleThread(); + + _bootstrap = new Bootstrap(); + //TODO epoll模型 服务器端怎么支持?得试试成功没有 + _bootstrap.Option(ChannelOption.SoReuseport, true); + // _bootstrap.Option(ChannelOption.SoReuseaddr, true); + _bootstrap.Group(_eventLoopGroup); + _bootstrap.ChannelFactory(() => new SocketDatagramChannel(AddressFamily.InterNetwork)); + _bootstrap.Handler(new ActionChannelInitializer(channel => + { + var pipeline = channel.Pipeline; + pipeline.AddLast(new ServerChannelHandler(_channelManager,channelConfig,executorPool,kcpListener,_scheduleThread)); + })); + + foreach (var port in ports) + { +// for (int i = 0; i < bindTimes; i++) { + var task = _bootstrap.BindAsync(port); + var channel = task.Result; + _localAddress.Add(channel); +// } + } + + //TODO 如何启动关闭进程的钩子?? 
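// One possible answer to the TODO above, as a minimal sketch: register the stop() method below on the
// process lifetime events of the BCL, for example
//     AppDomain.CurrentDomain.ProcessExit += (sender, args) => stop();
//     Console.CancelKeyPress += (sender, args) => stop();
// Whether these events fire inside a Unity editor or player is an assumption to verify; a Unity host may
// prefer its own application-quit callback instead.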
+ } + + + /** + * 同步关闭服务器 + */ + public void stop() { + foreach (var channel in _localAddress) + { + channel.CloseAsync().Wait(); + } + foreach (var ukcp in _channelManager.getAll()) + { + ukcp.close(); + } + _eventLoopGroup?.ShutdownGracefullyAsync(); + _executorPool?.stop(false); + _scheduleThread.stop(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/KcpServer.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/KcpServer.cs.meta new file mode 100644 index 0000000..0201956 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/KcpServer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 3fbe87cd63be61b4c9c333a5d626de0d +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ReadTask.cs b/Runtime/csharp-kcp/dotNetty-kcp/ReadTask.cs new file mode 100644 index 0000000..cf252f2 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ReadTask.cs @@ -0,0 +1,112 @@ +using System; +using DotNetty.Buffers; +using DotNetty.Common; +using dotNetty_kcp.thread; + +namespace dotNetty_kcp +{ + public class ReadTask : ITask + { + private Ukcp kcp; + + private static readonly ThreadLocalPool RECYCLER = + new ThreadLocalPool(handle => new ReadTask(handle)); + + private readonly ThreadLocalPool.Handle recyclerHandle; + + private ReadTask(ThreadLocalPool.Handle recyclerHandle) + { + this.recyclerHandle = recyclerHandle; + } + + public static ReadTask New(Ukcp kcp) + { + ReadTask readTask = RECYCLER.Take(); + readTask.kcp = kcp; + return readTask; + } + + public void execute() + { + CodecOutputList bufList = null; + try { + //Thread.sleep(1000); + //查看连接状态 + if (!kcp.isActive()) { + return; + } + bool hasKcpMessage = false; + long current = kcp.currentMs(); + var readQueue = kcp.ReadQueue; + IByteBuffer byteBuf = null; + for (;;) + { + if (!readQueue.TryDequeue(out byteBuf)) + { + break; + } + hasKcpMessage = true; + kcp.input(byteBuf, current); + byteBuf.Release(); + } + if (!hasKcpMessage) { + return; + } + if (kcp.isStream()) { + while (kcp.canRecv()) { + if (bufList == null) { + bufList = CodecOutputList.NewInstance(); + } + kcp.receive(bufList); + } + int size = bufList.Count; + for (int i = 0; i < size; i++) + { + byteBuf = bufList[i]; + readBytebuf(byteBuf,current); + } + } else { + while (kcp.canRecv()) { + IByteBuffer recvBuf = kcp.mergeReceive(); + readBytebuf(recvBuf,current); + } + } + //判断写事件 + if (!kcp.WriteQueue.IsEmpty&&kcp.canSend(false)) { + kcp.notifyWriteEvent(); + } + } catch (Exception e) { + kcp.KcpListener.handleException(e,kcp); + Console.WriteLine(e); + } finally { + release(); + bufList?.Return(); + } + } + + + private void readBytebuf(IByteBuffer buf,long current) + { + kcp.LastRecieveTime = current; + try + { + kcp.getKcpListener().handleReceive(buf, kcp); + } + catch (Exception throwable) + { + kcp.getKcpListener().handleException(throwable, kcp); + } + finally + { + buf.Release(); + } + } + + private void release() + { + kcp.ReadProcessing.Set(false); + kcp = null; + recyclerHandle.Release(this); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ReadTask.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ReadTask.cs.meta new file mode 100644 index 0000000..5bd72de --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ReadTask.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 939e0a4c80f24694ab6c7a252a557d30 +MonoImporter: + externalObjects: {} + 
serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ScheduleTask.cs b/Runtime/csharp-kcp/dotNetty-kcp/ScheduleTask.cs new file mode 100644 index 0000000..f1c39ce --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ScheduleTask.cs @@ -0,0 +1,77 @@ +using System; +using base_kcp; +using DotNetty.Common.Utilities; +using dotNetty_kcp.thread; + +namespace dotNetty_kcp +{ + public class ScheduleTask:IScheduleTask,ITask + { + private readonly IMessageExecutor _imessageExecutor; + + private readonly IChannelManager _channelManager; + + private readonly Ukcp _ukcp; + + private readonly IScheduleThread _scheduleThread; + + + public ScheduleTask(IChannelManager channelManager, Ukcp ukcp, IScheduleThread scheduleThread) + { + _imessageExecutor = ukcp.IMessageExecutor; + _channelManager = channelManager; + _ukcp = ukcp; + _scheduleThread = scheduleThread; + } + + public void execute() + { + try + { + long now = _ukcp.currentMs(); + //判断连接是否关闭 + if (_ukcp.TimeoutMillis != 0 && now - _ukcp.TimeoutMillis > _ukcp.LastRecieveTime) { + _ukcp.internalClose(); + } + if(!_ukcp.isActive()){ + var user = _ukcp.user(); + //抛回网络线程处理连接删除 + user.Channel.EventLoop.Execute(() => {_channelManager.del(_ukcp);}); + _ukcp.release(); + return; + } + long timeLeft = _ukcp.getTsUpdate()-now; + //判断执行时间是否到了 + if(timeLeft>0){ + //System.err.println(timeLeft); + _scheduleThread.schedule(this, TimeSpan.FromMilliseconds(timeLeft)); + return; + } + + //long start = System.currentTimeMillis(); + long next = _ukcp.flush(now); + //System.err.println(next); + //System.out.println("耗时 "+(System.currentTimeMillis()-start)); + _scheduleThread.schedule(this, TimeSpan.FromMilliseconds(next)); + + //检测写缓冲区 如果能写则触发写事件 + if(!_ukcp.WriteQueue.IsEmpty&&_ukcp.canSend(false) + ){ + _ukcp.notifyWriteEvent(); + } + } catch (Exception e) { + Console.WriteLine(e); + } + } + + public void Run(ITimeout timeout) + { + _imessageExecutor.execute(this); + } + + public void Run() + { + _imessageExecutor.execute(this); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ScheduleTask.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ScheduleTask.cs.meta new file mode 100644 index 0000000..a55bfe1 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ScheduleTask.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 997b18e2efbee8344a6863cf2a368429 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ServerChannelHandler.cs b/Runtime/csharp-kcp/dotNetty-kcp/ServerChannelHandler.cs new file mode 100644 index 0000000..30db06a --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ServerChannelHandler.cs @@ -0,0 +1,102 @@ +using System; +using base_kcp; +using DotNetty.Transport.Channels; +using DotNetty.Transport.Channels.Sockets; +using dotNetty_kcp.thread; +using DotNetty.Buffers; +using fec; +using fec.fec; + +namespace dotNetty_kcp +{ + public class ServerChannelHandler:ChannelHandlerAdapter + { + private readonly IChannelManager _channelManager; + + private readonly ChannelConfig _channelConfig ; + + private readonly IExecutorPool _executorPool; + + private readonly KcpListener _kcpListener; + private readonly IScheduleThread _scheduleThread; + + + public ServerChannelHandler(IChannelManager channelManager, ChannelConfig channelConfig, 
IExecutorPool executorPool, KcpListener kcpListener, IScheduleThread scheduleThread) + { + _channelManager = channelManager; + _channelConfig = channelConfig; + _executorPool = executorPool; + _kcpListener = kcpListener; + _scheduleThread = scheduleThread; + } + + public override void ExceptionCaught(IChannelHandlerContext context, Exception exception) + { + Console.WriteLine(exception); + } + + + public override void ChannelRead(IChannelHandlerContext context, object message) + { + var msg = (DatagramPacket) message; + var channel = context.Channel; + var ukcp = _channelManager.get(msg); + var content = msg.Content; + User user; + if (ukcp != null) + { + user = ukcp.user(); + //每次收到消息重绑定地址 + user.RemoteAddress = msg.Sender; + ukcp.read(content); + return; + } + + + //如果是新连接第一个包的sn必须为0 + var sn = getSn(content,_channelConfig); + if(sn!=0) + { + msg.Release(); + return; + } + + var messageExecutor = _executorPool.GetAutoMessageExecutor(); + KcpOutput kcpOutput = new KcpOutPutImp(); + + ReedSolomon reedSolomon = null; + if(_channelConfig.FecDataShardCount!=0&&_channelConfig.FecParityShardCount!=0){ + reedSolomon = ReedSolomon.create(_channelConfig.FecDataShardCount,_channelConfig.FecParityShardCount); + } + + ukcp = new Ukcp(kcpOutput,_kcpListener,messageExecutor,reedSolomon,_channelConfig); + + user = new User(channel,msg.Sender,msg.Recipient); + ukcp.user(user); + _channelManager.New(msg.Sender,ukcp,msg); + + messageExecutor.execute(new ConnectTask(ukcp, _kcpListener)); + + ukcp.read(content); + + var scheduleTask = new ScheduleTask(_channelManager,ukcp,_scheduleThread); + _scheduleThread.schedule(scheduleTask, TimeSpan.FromMilliseconds(ukcp.getInterval())); + } + + + private int getSn(IByteBuffer byteBuf,ChannelConfig channelConfig){ + var headerSize = 0; + if (channelConfig.Crc32Check) + { + headerSize+=Ukcp.HEADER_CRC; + } + if(channelConfig.FecDataShardCount!=0&&channelConfig.FecParityShardCount!=0){ + headerSize+= Fec.fecHeaderSizePlus2; + } + var sn = byteBuf.GetIntLE(byteBuf.ReaderIndex+Kcp.IKCP_SN_OFFSET+headerSize); + return sn; + } + + + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ServerChannelHandler.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ServerChannelHandler.cs.meta new file mode 100644 index 0000000..b637d62 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ServerChannelHandler.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 2b43f3f1229e5ca4cbe3398d91bc559a +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ServerConvChannelManager.cs b/Runtime/csharp-kcp/dotNetty-kcp/ServerConvChannelManager.cs new file mode 100644 index 0000000..b7745a9 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ServerConvChannelManager.cs @@ -0,0 +1,65 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Net; +using System.Xml.Linq; +using DotNetty.Transport.Channels; +using DotNetty.Transport.Channels.Sockets; + +namespace dotNetty_kcp +{ + /** + * 根据conv确定一个session + */ + public class ServerConvChannelManager : IChannelManager + { + + private readonly ConcurrentDictionary _ukcps = new ConcurrentDictionary(); + + private readonly int convIndex; + + public ServerConvChannelManager(int convIndex) + { + this.convIndex = convIndex; + } + + public Ukcp get(DatagramPacket msg) + { + var conv = 
getConv(msg); + _ukcps.TryGetValue(conv, out var ukcp); + return ukcp; + } + + private int getConv(DatagramPacket msg) { + var bytebuffer = msg.Content; + return bytebuffer.GetIntLE(convIndex);; + } + + + + public void New(EndPoint endPoint, Ukcp ukcp,DatagramPacket msg) + { + var conv = ukcp.getConv(); + if (msg != null) { + conv = getConv(msg); + ukcp.setConv(conv); + } + _ukcps.TryAdd(conv, ukcp); + } + + public void del(Ukcp ukcp) + { + _ukcps.TryRemove(ukcp.getConv(), out var temp); + if (temp == null) + { + Console.WriteLine("ukcp session is not exist conv: " + ukcp.getConv()); + } + } + + public ICollection getAll() + { + return this._ukcps.Values; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ServerConvChannelManager.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ServerConvChannelManager.cs.meta new file mode 100644 index 0000000..9c7bf8c --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ServerConvChannelManager.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 43cf6f6dfc50bcf4fad72c939334b11f +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ServerEndPointChannelManager.cs b/Runtime/csharp-kcp/dotNetty-kcp/ServerEndPointChannelManager.cs new file mode 100644 index 0000000..d07c905 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ServerEndPointChannelManager.cs @@ -0,0 +1,38 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Net; +using DotNetty.Transport.Channels.Sockets; + +namespace dotNetty_kcp +{ + public class ServerEndPointChannelManager : IChannelManager + { + private readonly ConcurrentDictionary _ukcps = new ConcurrentDictionary(); + + public Ukcp get(DatagramPacket msg) + { + _ukcps.TryGetValue(msg.Sender, out var ukcp); + return ukcp; + } + + public void New(EndPoint endPoint, Ukcp ukcp, DatagramPacket msg) + { + _ukcps[endPoint] = ukcp; + } + + public void del(Ukcp ukcp) + { + _ukcps.TryRemove(ukcp.user().RemoteAddress, out var temp); + if (temp == null) + { + Console.WriteLine("ukcp session is not exist RemoteAddress: " + ukcp.user().RemoteAddress); + } + } + + public ICollection getAll() + { + return _ukcps.Values; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/ServerEndPointChannelManager.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/ServerEndPointChannelManager.cs.meta new file mode 100644 index 0000000..470aad1 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/ServerEndPointChannelManager.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 864771e3193659f47b4a0ee5acb636be +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/Ukcp.cs b/Runtime/csharp-kcp/dotNetty-kcp/Ukcp.cs new file mode 100644 index 0000000..413f882 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/Ukcp.cs @@ -0,0 +1,634 @@ +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.IO; +using base_kcp; +using DotNetty.Buffers; +using DotNetty.Transport.Channels.Sockets; +using dotNetty_kcp.thread; +using DotNetty.Common.Internal; +using fec; +using fec.fec; + +namespace dotNetty_kcp +{ + public class Ukcp + { + public const int HEADER_CRC = 4, HEADER_NONCESIZE = 16; + + private 
readonly Kcp _kcp; + + private bool fastFlush = true; + + private long tsUpdate = -1; + + private bool _active; + + private readonly FecEncode _fecEncode; + private readonly FecDecode _fecDecode; + + private readonly ConcurrentQueue _writeQueue; + + private readonly MpscArrayQueue _readQueue; + + private readonly IMessageExecutor _iMessageExecutor; + + private readonly KcpListener _kcpListener; + + private readonly long _timeoutMillis; + + private readonly AtomicBoolean _readProcessing = new AtomicBoolean(); + + private readonly AtomicBoolean _writeProcessing = new AtomicBoolean(); + + private readonly bool _crc32Check; + + + /** + * 上次收到完整消息包时间 + * 用于心跳检测 + **/ + internal long LastRecieveTime { get; set; } = KcpUntils.currentMs(); + + + + /** + * Creates a new instance. + * + * @param output output for kcp + */ + public Ukcp(KcpOutput output, KcpListener kcpListener, IMessageExecutor iMessageExecutor, + ReedSolomon reedSolomon, ChannelConfig channelConfig) + { + this._timeoutMillis = channelConfig.TimeoutMillis; + this._crc32Check = channelConfig.Crc32Check; + this._kcp = new Kcp(channelConfig.Conv, output); + this._active = true; + this._kcpListener = kcpListener; + this._iMessageExecutor = iMessageExecutor; + //默认2<<11 可以修改 + _writeQueue = new ConcurrentQueue(); + // (2<<10); + _readQueue = new MpscArrayQueue(2<<10); + //recieveList = new SpscLinkedQueue<>(); + int headerSize = 0; + + + //init crc32 + if(channelConfig.Crc32Check){ + var kcpOutput = _kcp.Output; + kcpOutput = new Crc32OutPut(kcpOutput,headerSize); + _kcp.Output=kcpOutput; + headerSize+=HEADER_CRC; + } + + //init fec + if (reedSolomon != null) + { + var kcpOutput = _kcp.Output; + _fecEncode = new FecEncode(headerSize, reedSolomon, channelConfig.Mtu); + _fecDecode = new FecDecode(3 * reedSolomon.getTotalShardCount(), reedSolomon, channelConfig.Mtu); + kcpOutput = new FecOutPut(kcpOutput, _fecEncode); + _kcp.Output = kcpOutput; + headerSize += Fec.fecHeaderSizePlus2; + } + + _kcp.setReserved(headerSize); + initKcpConfig(channelConfig); + } + + + private void initKcpConfig(ChannelConfig channelConfig) + { + _kcp.initNodelay(channelConfig.Nodelay, channelConfig.Interval, channelConfig.Fastresend, + channelConfig.Nocwnd); + _kcp.SndWnd = channelConfig.Sndwnd; + _kcp.RcvWnd = channelConfig.Rcvwnd; + _kcp.Mtu = channelConfig.Mtu; + _kcp.Stream = channelConfig.Stream; + _kcp.AckNoDelay = channelConfig.AckNoDelay; + _kcp.setAckMaskSize(channelConfig.AckMaskSize); + fastFlush = channelConfig.FastFlush; + } + + + /** + * Receives ByteBufs. 
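* A minimal consumer sketch, mirroring ReadTask in this patch: while (canRecv()) { receive(bufList); } and then
* hand each buffer in bufList to the listener and release it; receive only moves already reassembled
* messages out of the kcp and never blocks.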
+ * + * @param bufList received IByteBuffer will be add to the list + */ + protected internal void receive(List bufList) + { + _kcp.recv(bufList); + } + + + protected internal IByteBuffer mergeReceive() + { + return _kcp.mergeRecv(); + } + + + internal void input(IByteBuffer data, long current) + { +// _lastRecieveTime = KcpUntils.currentMs(); + Snmp.snmp.InPkts++; + Snmp.snmp.InBytes += data.ReadableBytes; + if (_crc32Check) + { + long checksum = data.ReadUnsignedIntLE(); + if (checksum != Crc32.ComputeChecksum(data,data.ReaderIndex,data.ReadableBytes)) + { + Snmp.snmp.InCsumErrors++; + return; + } + } + + if (_fecDecode != null) + { + FecPacket fecPacket = FecPacket.newFecPacket(data); + if (fecPacket.Flag == Fec.typeData) + { + data.SkipBytes(2); + input(data, true, current); + } + + if (fecPacket.Flag == Fec.typeData || fecPacket.Flag == Fec.typeParity) + { + var byteBufs = _fecDecode.decode(fecPacket); + if (byteBufs != null) + { + foreach (var IByteBuffer in byteBufs) + { + input(IByteBuffer, false, current); + IByteBuffer.Release(); + } + } + } + } + else + { + input(data, true, current); + } + } + + private void input(IByteBuffer data, bool regular, long current) + { + int ret = _kcp.input(data, regular, current); + switch (ret) + { + case -1: + throw new IOException("No enough bytes of head"); + case -2: + throw new IOException("No enough bytes of data"); + case -3: + throw new IOException("Mismatch cmd"); + case -4: + throw new IOException("Conv inconsistency"); + default: + break; + } + } + + + /** + * Sends a IByteBuffer. + * + * @param buf + * @throws IOException + */ + internal void send(IByteBuffer buf) + { + int ret = _kcp.send(buf); + switch (ret) + { + case -2: + throw new IOException("Too many fragments"); + default: + break; + } + } + + /** + * The size of the first msg of the kcp. + * + * @return The size of the first msg of the kcp, or -1 if none of msg + */ + internal int peekSize() + { + return _kcp.peekSize(); + } + + /** + * Returns {@code true} if there are bytes can be received. + * + * @return + */ + protected internal bool canRecv() + { + return _kcp.canRecv(); + } + + + /** + * Returns {@code true} if the kcp can send more bytes. + * + * @param curCanSend last state of canSend + * @return {@code true} if the kcp can send more bytes + */ + protected internal bool canSend(bool curCanSend) + { + int max = _kcp.SndWnd * 2; + + int waitSnd = _kcp.waitSnd(); + if (curCanSend) + { + return waitSnd < max; + } + else + { + int threshold = Math.Max(1, max / 2); + return waitSnd < threshold; + } + } + + /** + * Udpates the kcp. + * + * @param current current time in milliseconds + * @return the next time to update + */ + internal long update(long current) + { + _kcp.update(current); + long nextTsUp = check(current); + + setTsUpdate(nextTsUp); + return nextTsUp; + } + + protected internal long flush(long current) + { + return _kcp.flush(false, current); + } + + /** + * Determines when should you invoke udpate. + * + * @param current current time in milliseconds + * @return + * @see Kcp#check(long) + */ + protected internal long check(long current) + { + return _kcp.check(current); + } + + /** + * Returns {@code true} if the kcp need to flush. + * + * @return {@code true} if the kcp need to flush + */ + protected internal bool checkFlush() + { + return _kcp.checkFlush(); + } + + /** + * Sets params of nodelay. 
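* Illustrative values only, taken from common KCP practice rather than from this class: nodelay(true, 10, 2, true)
* is the usual low-latency profile, while nodelay(false, 40, 0, false) approximates the conservative default.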
+ * + * @param nodelay {@code true} if nodelay mode is enabled + * @param interval protocol internal work interval, in milliseconds + * @param resend fast retransmission mode, 0 represents off by default, 2 can be set (2 ACK spans will result + * in direct retransmission) + * @param nc {@code true} if turn off flow control + */ + protected internal void nodelay(bool nodelay, int interval, int resend, bool nc) + { + _kcp.initNodelay(nodelay, interval, resend, nc); + } + + /** + * Returns conv of kcp. + * + * @return conv of kcp + */ + public int getConv() + { + return _kcp.Conv; + } + + /** + * Set the conv of kcp. + * + * @param conv the conv of kcp + */ + public void setConv(int conv) + { + _kcp.Conv = conv; + } + + /** + * Returns {@code true} if and only if nodelay is enabled. + * + * @return {@code true} if and only if nodelay is enabled + */ + public bool isNodelay() + { + return _kcp.Nodelay; + } + + /** + * Sets whether enable nodelay. + * + * @param nodelay {@code true} if enable nodelay + * @return this object + */ + public Ukcp setNodelay(bool nodelay) + { + _kcp.Nodelay = nodelay; + return this; + } + + /** + * Returns update interval. + * + * @return update interval + */ + public int getInterval() + { + return _kcp.Interval; + } + + /** + * Sets update interval + * + * @param interval update interval + * @return this object + */ + public Ukcp setInterval(int interval) + { + _kcp.setInterval(interval); + return this; + } + + /** + * Returns the fastresend of kcp. + * + * @return the fastresend of kcp + */ + public int getFastResend() + { + return _kcp.Fastresend; + } + + /** + * Sets the fastresend of kcp. + * + * @param fastResend + * @return this object + */ + public Ukcp setFastResend(int fastResend) + { + _kcp.Fastresend=fastResend; + return this; + } + + public bool isNocwnd() + { + return _kcp.Nocwnd; + } + + public Ukcp setNocwnd(bool nocwnd) + { + _kcp.Nocwnd = nocwnd; + return this; + } + + public int getMinRto() + { + return _kcp.RxMinrto; + } + + public Ukcp setMinRto(int minRto) + { + _kcp.RxMinrto = minRto; + return this; + } + + public int getMtu() + { + return _kcp.Mtu; + } + + public Ukcp setMtu(int mtu) + { + _kcp.setMtu(mtu); + return this; + } + + public bool isStream() + { + return _kcp.Stream; + } + + public Ukcp setStream(bool stream) + { + _kcp.Stream=stream; + return this; + } + + public int getDeadLink() + { + return _kcp.DeadLink; + } + + public Ukcp setDeadLink(int deadLink) + { + _kcp.DeadLink = deadLink; + return this; + } + + /** + * Sets the {@link ByteBufAllocator} which is used for the kcp to allocate buffers. 
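* For example, setByteBufAllocator(PooledByteBufferAllocator.Default) from DotNetty.Buffers keeps kcp buffers
* pooled; which allocator suits a given workload is an assumption to be profiled, not something this class mandates.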
+ * + * @param allocator the allocator is used for the kcp to allocate buffers + * @return this object + */ + public Ukcp setByteBufAllocator(IByteBufferAllocator allocator) + { + _kcp.ByteBufAllocator = allocator; + return this; + } + + public int waitSnd() + { + return _kcp.waitSnd(); + } + + public int getRcvWnd() + { + return _kcp.RcvWnd; + } + + + protected internal bool isFastFlush() + { + return fastFlush; + } + + public Ukcp setFastFlush(bool fastFlush) + { + this.fastFlush = fastFlush; + return this; + } + + + internal void read(IByteBuffer iByteBuffer) + { + if (_readQueue.TryEnqueue(iByteBuffer)) + { + notifyReadEvent(); + } + else + { + iByteBuffer.Release(); + Console.WriteLine("conv "+_kcp.Conv+" recieveList is full"); + } + } + + /** + * 主动发消息使用 + * 线程安全的 + * @param IByteBuffer 发送后需要手动释放 + * @return + */ + public bool write(IByteBuffer byteBuffer) + { + byteBuffer = byteBuffer.RetainedDuplicate(); + + _writeQueue.Enqueue(byteBuffer); + // if (!_writeQueue.TryEnqueue(byteBuffer)) + // { + // Console.WriteLine("conv "+kcp.Conv+" sendList is full"); + // byteBuffer.Release(); + // return false; + // } + notifyWriteEvent(); + return true; + } + + + /** + * 主动关闭连接调用 + */ + public void close() + { + this._iMessageExecutor.execute(new CloseTask(this)); + } + + private void notifyReadEvent() + { + if (_readProcessing.CompareAndSet(false, true)) + { + var readTask = ReadTask.New(this); + _iMessageExecutor.execute(readTask); + } + } + + protected internal void notifyWriteEvent() + { + if (_writeProcessing.CompareAndSet(false, true)) + { + var writeTask = WriteTask.New(this); + _iMessageExecutor.execute(writeTask); + } + } + + + protected internal long getTsUpdate() + { + return tsUpdate; + } + + + protected internal Ukcp setTsUpdate(long tsUpdate) + { + this.tsUpdate = tsUpdate; + return this; + } + + + + + protected internal KcpListener getKcpListener() + { + return _kcpListener; + } + + public bool isActive() + { + return _active; + } + + + protected internal void internalClose() + { + _kcpListener.handleClose(this); + _active = false; + } + + internal void release() + { + _kcp.State = -1; + _kcp.release(); + + IByteBuffer buffer; + while (_writeQueue.TryDequeue(out buffer)) + { + buffer.Release(); + } + + while (_readQueue.TryDequeue(out buffer)) + { + buffer.Release(); + } + _fecEncode?.release(); + _fecDecode?.release(); + } + + + + public User user() + { + return (User) _kcp.User; + } + + public Ukcp user(User user) + { + _kcp.User = user; + return this; + } + + internal ConcurrentQueue WriteQueue => _writeQueue; + + internal MpscArrayQueue ReadQueue => _readQueue; + + public long TimeoutMillis => _timeoutMillis; + + + internal long currentMs() + { + return _kcp.currentMs(); + } + + + internal AtomicBoolean ReadProcessing => _readProcessing; + + internal AtomicBoolean WriteProcessing => _writeProcessing; + + protected internal KcpListener KcpListener => _kcpListener; + + internal IMessageExecutor IMessageExecutor => _iMessageExecutor; + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/Ukcp.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/Ukcp.cs.meta new file mode 100644 index 0000000..99786af --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/Ukcp.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 1b0e3ac318f9c7948b0dfd39a05c41d5 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git 
a/Runtime/csharp-kcp/dotNetty-kcp/User.cs b/Runtime/csharp-kcp/dotNetty-kcp/User.cs new file mode 100644 index 0000000..498d213 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/User.cs @@ -0,0 +1,50 @@ +using System; +using System.Net; +using DotNetty.Transport.Channels; + +namespace dotNetty_kcp +{ + public class User + { + + private IChannel channel; + private EndPoint remoteAddress; + private EndPoint localAddress; + + private object Object; + + + public User(IChannel channel, EndPoint remoteAddress, EndPoint localAddress) + { + this.channel = channel; + this.remoteAddress = remoteAddress; + this.localAddress = localAddress; + } + + + public IChannel Channel + { + get => channel; + set => channel = value; + } + + + public EndPoint RemoteAddress + { + get => remoteAddress; + set => remoteAddress = value; + } + + public EndPoint LocalAddress + { + get => localAddress; + set => localAddress = value; + } + + public object O + { + get => Object; + set => Object = value; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/User.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/User.cs.meta new file mode 100644 index 0000000..d942b93 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/User.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 749072612464fdd42a9f59d86c8a7a63 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/WriteTask.cs b/Runtime/csharp-kcp/dotNetty-kcp/WriteTask.cs new file mode 100644 index 0000000..6812482 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/WriteTask.cs @@ -0,0 +1,91 @@ +using System; +using System.IO; +using DotNetty.Buffers; +using DotNetty.Common; +using dotNetty_kcp.thread; + +namespace dotNetty_kcp +{ + public class WriteTask : ITask + { + private Ukcp kcp; + + private static readonly ThreadLocalPool RECYCLER = + new ThreadLocalPool(handle => new WriteTask(handle)); + + private readonly ThreadLocalPool.Handle recyclerHandle; + + private WriteTask(ThreadLocalPool.Handle recyclerHandle) + { + this.recyclerHandle = recyclerHandle; + } + + public static WriteTask New(Ukcp kcp) + { + WriteTask recieveTask = RECYCLER.Take(); + recieveTask.kcp = kcp; + return recieveTask; + } + + + public void execute() + { + try + { + //查看连接状态 + if (!kcp.isActive()) + { + return; + } + + //从发送缓冲区到kcp缓冲区 + var writeQueue = kcp.WriteQueue; + IByteBuffer byteBuf = null; + while (kcp.canSend(false)) + { + if (!writeQueue.TryDequeue(out byteBuf)) + { + break; + } + try + { + this.kcp.send(byteBuf); + byteBuf.Release(); + } + catch (IOException e) + { + kcp.getKcpListener().handleException(e, kcp); + return; + } + } + + //如果有发送 则检测时间 + if (kcp.canSend(false) && (!kcp.checkFlush() || !kcp.isFastFlush())) + { + return; + } + + long now = kcp.currentMs(); + long next = kcp.flush(now); + //System.out.println(next); + //System.out.println("耗时"+(System.currentTimeMillis()-now)); + kcp.setTsUpdate(now + next); + } + catch (Exception e) + { + Console.WriteLine(e); + } + finally + { + release(); + } + } + + private void release() + { + kcp.WriteProcessing.Set(false); + kcp = null; + recyclerHandle.Release(this); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/WriteTask.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/WriteTask.cs.meta new file mode 100644 index 0000000..313351a --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/WriteTask.cs.meta @@ -0,0 +1,11 @@ 
+fileFormatVersion: 2 +guid: 2757c02f09120f5438d77352d4a0071f +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/queue.meta b/Runtime/csharp-kcp/dotNetty-kcp/queue.meta new file mode 100644 index 0000000..0c05bbb --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/queue.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: a44c18aa3a94907449e235d91f9010c2 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/queue/ConcurrentCircularArrayQueue.cs b/Runtime/csharp-kcp/dotNetty-kcp/queue/ConcurrentCircularArrayQueue.cs new file mode 100644 index 0000000..318a507 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/queue/ConcurrentCircularArrayQueue.cs @@ -0,0 +1,73 @@ +using DotNetty.Common.Internal; +using DotNetty.Common.Utilities; + +namespace base_kcp +{ + public abstract class ConcurrentCircularArrayQueue : ConcurrentCircularArrayQueueL0Pad + where T : class + { + protected long Mask; + protected readonly T[] Buffer; + + protected ConcurrentCircularArrayQueue(int capacity) + { + int actualCapacity = IntegerExtensions.RoundUpToPowerOfTwo(capacity); + this.Mask = actualCapacity - 1; + // pad data on either end with some empty slots. + this.Buffer = new T[actualCapacity + RefArrayAccessUtil.RefBufferPad * 2]; + } + + /// + /// Calculates an element offset based on a given array index. + /// + /// The desirable element index. + /// The offset in bytes within the array for a given index. + protected long CalcElementOffset(long index) => RefArrayAccessUtil.CalcElementOffset(index, this.Mask); + + /// + /// A plain store (no ordering/fences) of an element to a given offset. + /// + /// Computed via . + /// A kitty. + protected void SpElement(long offset, T e) => RefArrayAccessUtil.SpElement(this.Buffer, offset, e); + + /// + /// An ordered store(store + StoreStore barrier) of an element to a given offset. + /// + /// Computed via . + /// An orderly kitty. + protected void SoElement(long offset, T e) => RefArrayAccessUtil.SoElement(this.Buffer, offset, e); + + /// + /// A plain load (no ordering/fences) of an element from a given offset. + /// + /// Computed via . + /// The element at the offset. + protected T LpElement(long offset) => RefArrayAccessUtil.LpElement(this.Buffer, offset); + + /// + /// A volatile load (load + LoadLoad barrier) of an element from a given offset. + /// + /// Computed via . + /// The element at the offset. 
+ protected T LvElement(long offset) => RefArrayAccessUtil.LvElement(this.Buffer, offset); + + public override void Clear() + { + while (this.TryDequeue(out T _) || !this.IsEmpty) + { + // looping + } + } + + public int Capacity() => (int)(this.Mask + 1); + } + + public abstract class ConcurrentCircularArrayQueueL0Pad : AbstractQueue + { +#pragma warning disable 169 // padded reference + long p00, p01, p02, p03, p04, p05, p06, p07; + long p30, p31, p32, p33, p34, p35, p36, p37; +#pragma warning restore 169 + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/queue/ConcurrentCircularArrayQueue.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/queue/ConcurrentCircularArrayQueue.cs.meta new file mode 100644 index 0000000..d39c24f --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/queue/ConcurrentCircularArrayQueue.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: fea906a1f4971494ab8cc2bee7004e15 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/queue/MpscArrayQueue.cs b/Runtime/csharp-kcp/dotNetty-kcp/queue/MpscArrayQueue.cs new file mode 100644 index 0000000..5ab9132 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/queue/MpscArrayQueue.cs @@ -0,0 +1,319 @@ +using System.Diagnostics.Contracts; +using System.Threading; +using DotNetty.Common.Internal; + +namespace base_kcp +{ + public class MpscArrayQueue : MpscArrayQueueConsumerField + where T : class + { +#pragma warning disable 169 // padded reference + long p40, p41, p42, p43, p44, p45, p46; + long p30, p31, p32, p33, p34, p35, p36, p37; +#pragma warning restore 169 + + public MpscArrayQueue(int capacity) + : base(capacity) + { + } + + /// + /// Lock free Enqueue operation, using a single compare-and-swap. As the class name suggests, access is + /// permitted to many threads concurrently. + /// + /// The item to enqueue. + /// true if the item was added successfully, otherwise false. + /// + public override bool TryEnqueue(T e) + { + Contract.Requires(e != null); + + // use a cached view on consumer index (potentially updated in loop) + long mask = this.Mask; + long capacity = mask + 1; + long consumerIndexCache = this.ConsumerIndexCache; // LoadLoad + long currentProducerIndex; + do + { + currentProducerIndex = this.ProducerIndex; // LoadLoad + long wrapPoint = currentProducerIndex - capacity; + if (consumerIndexCache <= wrapPoint) + { + long currHead = this.ConsumerIndex; // LoadLoad + if (currHead <= wrapPoint) + { + return false; // FULL :( + } + else + { + // update shared cached value of the consumerIndex + this.ConsumerIndexCache = currHead; // StoreLoad + // update on stack copy, we might need this value again if we lose the CAS. + consumerIndexCache = currHead; + } + } + } while (!this.TrySetProducerIndex(currentProducerIndex, currentProducerIndex + 1)); + + // NOTE: the new producer index value is made visible BEFORE the element in the array. If we relied on + // the index visibility to poll() we would need to handle the case where the element is not visible. + + // Won CAS, move on to storing + long offset = RefArrayAccessUtil.CalcElementOffset(currentProducerIndex, mask); + this.SoElement(offset, e); // StoreStore + return true; // AWESOME :) + } + + /// + /// A wait-free alternative to , which fails on compare-and-swap failure. + /// + /// The item to enqueue. 
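/// A hedged caller sketch based on the return codes described below: retry only while WeakEnqueue(item) == -1,
/// since that only means the CAS was lost, and treat a result of 1 as a genuinely full queue that needs back-pressure.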
+ /// 1 if next element cannot be filled, -1 if CAS failed, and 0 if successful. + public int WeakEnqueue(T e) + { + Contract.Requires(e != null); + + long mask = this.Mask; + long capacity = mask + 1; + long currentTail = this.ProducerIndex; // LoadLoad + long consumerIndexCache = this.ConsumerIndexCache; // LoadLoad + long wrapPoint = currentTail - capacity; + if (consumerIndexCache <= wrapPoint) + { + long currHead = this.ConsumerIndex; // LoadLoad + if (currHead <= wrapPoint) + { + return 1; // FULL :( + } + else + { + this.ConsumerIndexCache = currHead; // StoreLoad + } + } + + // look Ma, no loop! + if (!this.TrySetProducerIndex(currentTail, currentTail + 1)) + { + return -1; // CAS FAIL :( + } + + // Won CAS, move on to storing + long offset = RefArrayAccessUtil.CalcElementOffset(currentTail, mask); + this.SoElement(offset, e); + return 0; // AWESOME :) + } + + /// + /// Lock free poll using ordered loads/stores. As class name suggests, access is limited to a single thread. + /// + /// The dequeued item. + /// true if an item was retrieved, otherwise false. + /// + public override bool TryDequeue(out T item) + { + long consumerIndex = this.ConsumerIndex; // LoadLoad + long offset = this.CalcElementOffset(consumerIndex); + // Copy field to avoid re-reading after volatile load + T[] buffer = this.Buffer; + + // If we can't see the next available element we can't poll + T e = RefArrayAccessUtil.LvElement(buffer, offset); // LoadLoad + if (null == e) + { + // NOTE: Queue may not actually be empty in the case of a producer (P1) being interrupted after + // winning the CAS on offer but before storing the element in the queue. Other producers may go on + // to fill up the queue after this element. + + if (consumerIndex != this.ProducerIndex) + { + do + { + e = RefArrayAccessUtil.LvElement(buffer, offset); + } while (e == null); + } + else + { + item = default(T); + return false; + } + } + + RefArrayAccessUtil.SpElement(buffer, offset, default(T)); + this.ConsumerIndex = consumerIndex + 1; // StoreStore + item = e; + return true; + } + + /// + /// Lock free peek using ordered loads. As class name suggests access is limited to a single thread. + /// + /// The peeked item. + /// true if an item was retrieved, otherwise false. + /// + public override bool TryPeek(out T item) + { + // Copy field to avoid re-reading after volatile load + T[] buffer = this.Buffer; + + long consumerIndex = this.ConsumerIndex; // LoadLoad + long offset = this.CalcElementOffset(consumerIndex); + T e = RefArrayAccessUtil.LvElement(buffer, offset); + if (null == e) + { + // NOTE: Queue may not actually be empty in the case of a producer (P1) being interrupted after + // winning the CAS on offer but before storing the element in the queue. Other producers may go on + // to fill up the queue after this element. + + if (consumerIndex != this.ProducerIndex) + { + do + { + e = RefArrayAccessUtil.LvElement(buffer, offset); + } while (e == null); + } + else + { + item = default(T); + return false; + } + } + + item = e; + + return true; + } + + /// + /// Returns the number of items in this . + /// + public override int Count + { + get + { + // It is possible for a thread to be interrupted or reschedule between the read of the producer and + // consumer indices, therefore protection is required to ensure size is within valid range. In the + // event of concurrent polls/offers to this method the size is OVER estimated as we read consumer + // index BEFORE the producer index. 
+ + long after = this.ConsumerIndex; + while (true) + { + long before = after; + long currentProducerIndex = this.ProducerIndex; + after = this.ConsumerIndex; + if (before == after) + { + return (int) (currentProducerIndex - after); + } + } + } + } + + public override bool IsEmpty + { + get + { + // Order matters! + // Loading consumer before producer allows for producer increments after consumer index is read. + // This ensures the correctness of this method at least for the consumer thread. Other threads POV is + // not really + // something we can fix here. + return this.ConsumerIndex == this.ProducerIndex; + } + } + } + + public abstract class MpscArrayQueueL1Pad : ConcurrentCircularArrayQueue + where T : class + { +#pragma warning disable 169 // padded reference + long p10, p11, p12, p13, p14, p15, p16; + long p30, p31, p32, p33, p34, p35, p36, p37; +#pragma warning restore 169 + + protected MpscArrayQueueL1Pad(int capacity) + : base(capacity) + { + } + } + + public abstract class MpscArrayQueueTailField : MpscArrayQueueL1Pad + where T : class + { + long producerIndex; + + protected MpscArrayQueueTailField(int capacity) + : base(capacity) + { + } + + protected long ProducerIndex => Volatile.Read(ref this.producerIndex); + + protected bool TrySetProducerIndex(long expect, long newValue) => + Interlocked.CompareExchange(ref this.producerIndex, newValue, expect) == expect; + } + + public abstract class MpscArrayQueueMidPad : MpscArrayQueueTailField + where T : class + { +#pragma warning disable 169 // padded reference + long p20, p21, p22, p23, p24, p25, p26; + long p30, p31, p32, p33, p34, p35, p36, p37; +#pragma warning restore 169 + + protected MpscArrayQueueMidPad(int capacity) + : base(capacity) + { + } + } + + public abstract class MpscArrayQueueHeadCacheField : MpscArrayQueueMidPad + where T : class + { + long headCache; + + protected MpscArrayQueueHeadCacheField(int capacity) + : base(capacity) + { + } + + protected long ConsumerIndexCache + { + get { return Volatile.Read(ref this.headCache); } + set { Volatile.Write(ref this.headCache, value); } + } + } + + public abstract class MpscArrayQueueL2Pad : MpscArrayQueueHeadCacheField + where T : class + { +#pragma warning disable 169 // padded reference + long p20, p21, p22, p23, p24, p25, p26; + long p30, p31, p32, p33, p34, p35, p36, p37; +#pragma warning restore 169 + + protected MpscArrayQueueL2Pad(int capacity) + : base(capacity) + { + } + } + + public abstract class MpscArrayQueueConsumerField : MpscArrayQueueL2Pad + where T : class + { + long consumerIndex; + + protected MpscArrayQueueConsumerField(int capacity) + : base(capacity) + { + } + + protected long ConsumerIndex + { + get { return Volatile.Read(ref this.consumerIndex); } + set + { + Volatile.Write(ref this.consumerIndex, value); + } // todo: revisit: UNSAFE.putOrderedLong -- StoreStore fence + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/queue/MpscArrayQueue.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/queue/MpscArrayQueue.cs.meta new file mode 100644 index 0000000..9ba1389 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/queue/MpscArrayQueue.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 7d1bafe31d6030b4099676d5a1f7a170 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/queue/RefArrayAccessUtil.cs 
b/Runtime/csharp-kcp/dotNetty-kcp/queue/RefArrayAccessUtil.cs new file mode 100644 index 0000000..5a43757 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/queue/RefArrayAccessUtil.cs @@ -0,0 +1,54 @@ +using System; +using System.Threading; + +namespace base_kcp +{ + public class RefArrayAccessUtil + { + public static readonly int RefBufferPad = 64 * 2 / IntPtr.Size; + + /// + /// A plain store (no ordering/fences) of an element to a given offset. + /// + /// The element type. + /// The source buffer. + /// Computed via + /// An orderly kitty. + public static void SpElement(T[] buffer, long offset, T e) => buffer[offset] = e; + + /// + /// An ordered store(store + StoreStore barrier) of an element to a given offset. + /// + /// The element type. + /// The source buffer. + /// Computed via + /// + public static void SoElement(T[] buffer, long offset, T e) where T : class => Volatile.Write(ref buffer[offset], e); + + /// + /// A plain load (no ordering/fences) of an element from a given offset. + /// + /// The element type. + /// The source buffer. + /// Computed via + /// The element at the given in the given . + public static T LpElement(T[] buffer, long offset) => buffer[offset]; + + /// + /// A volatile load (load + LoadLoad barrier) of an element from a given offset. + /// + /// The element type. + /// The source buffer. + /// Computed via + /// The element at the given in the given . + public static T LvElement(T[] buffer, long offset) where T : class => Volatile.Read(ref buffer[offset]); + + /// + /// Gets the offset in bytes within the array for a given index. + /// + /// The desired element index. + /// Mask for the index. + /// The offset (in bytes) within the array for a given index. + public static long CalcElementOffset(long index, long mask) => RefBufferPad + (index & mask); + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/queue/RefArrayAccessUtil.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/queue/RefArrayAccessUtil.cs.meta new file mode 100644 index 0000000..a8d52cf --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/queue/RefArrayAccessUtil.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 67c9f27d84a8c7a40ba246e0601b66ae +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread.meta new file mode 100644 index 0000000..38606f5 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 6cf11e86805567b488e95557a80739bf +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/AbstratcMessageExecutor.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/AbstratcMessageExecutor.cs new file mode 100644 index 0000000..0b9d14d --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/AbstratcMessageExecutor.cs @@ -0,0 +1,96 @@ +using System.Threading; +using base_kcp; + +namespace dotNetty_kcp.thread +{ + public abstract class AbstratcMessageExecutor:IMessageExecutor + { + private Thread _thread; + + private volatile bool shutdown; + private volatile bool close; + + private readonly object _gate = new object(); + private static int id; + + + /** + * 启动消息处理器 + */ + public virtual void start() + { + _thread = new Thread(run) {Name = "ThreadMessageExecutor-" + id++}; + _thread.Start(); + } + 
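// Editor's note (not part of the diff): a small demo of the access helpers in
// RefArrayAccessUtil above. In this C# port the "offset" is an element index
// into the padded array (RefBufferPad + (index & mask)), not a byte offset.
// SpElement/LpElement are plain array accesses, while SoElement/LvElement add
// the ordering of Volatile.Write/Volatile.Read, so a reader never observes a
// slot before the writer's preceding stores.
using System;
using System.Threading;

static class ElementAccessDemo
{
    static void Main()
    {
        int refBufferPad = 64 * 2 / IntPtr.Size;          // same padding rule as above
        string[] buffer = new string[8 + refBufferPad * 2];
        long mask = 8 - 1;

        long offset = refBufferPad + (5 & mask);          // CalcElementOffset(5, mask)
        Volatile.Write(ref buffer[offset], "hello");      // SoElement: ordered store
        string seen = Volatile.Read(ref buffer[offset]);  // LvElement: volatile load
        Console.WriteLine(seen);
    }
}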
+ /**** + * + */ + public void stop(bool stopImmediately) + { + if (shutdown) + return; + shutdown = true; + if (stopImmediately) + { + close = true; + lock (_gate) + { + Monitor.Pulse(_gate); + } + + return; + } + while (!isEmpty()) + { + Thread.Sleep(1); + } + close = true; + lock (_gate) + { + Monitor.Pulse(_gate); + } + } + + public abstract bool isFull(); + + protected abstract bool isEmpty(); + + protected abstract bool TryDequeue(out ITask task); + + protected abstract bool TryEnqueue( ITask task); + + + private void run() + { + while (!close) + { + if (TryDequeue(out var task)) + { + task.execute(); + continue; + } + lock (_gate) + { + Monitor.Wait(_gate); + } + } + } + + + + + public bool execute(ITask iTask) + { + if (shutdown) + return false; + bool result = TryEnqueue(iTask); + lock (_gate) + { + Monitor.Pulse(_gate); + } + return result; + } + + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/AbstratcMessageExecutor.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/AbstratcMessageExecutor.cs.meta new file mode 100644 index 0000000..09e187e --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/AbstratcMessageExecutor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 867f536c8ef295c4eac300d10ed48fba +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/AtomicBoolean.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/AtomicBoolean.cs new file mode 100644 index 0000000..4be0595 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/AtomicBoolean.cs @@ -0,0 +1,78 @@ +using System.Threading; + +namespace dotNetty_kcp.thread +{ + public class AtomicBoolean + { + private int _value; + + public AtomicBoolean() + : this(false) { + + } + + /// Creates a new AtomicBoolean instance with the initial value provided. + /// + public AtomicBoolean(bool value) { + _value = value ? 1 : 0; + } + + /// + /// This method returns the current value. + /// + /// + /// The bool value to be accessed atomically. + /// + public bool Get() { + return _value != 0; + } + + /// + /// This method sets the current value atomically. + /// + /// + /// The new value to set. + /// + public void Set(bool value) { + Interlocked.Exchange(ref _value, value ? 1 : 0); + } + + /// + /// This method atomically sets the value and returns the original value. + /// + /// + /// The new value. + /// + /// + /// The value before setting to the new value. + /// + public bool GetAndSet(bool value) { + return Interlocked.Exchange(ref _value, value ? 1 : 0) != 0; + } + + /// + /// Atomically sets the value to the given updated value if the current value == the expected value. + /// + /// + /// The value to compare against. + /// + /// + /// The value to set if the value is equal to the expected value. + /// + /// + /// true if the comparison and set was successful. A false indicates the comparison failed. + /// + public bool CompareAndSet(bool expected, bool result) { + int e = expected ? 1 : 0; + int r = result ? 1 : 0; + return Interlocked.CompareExchange(ref _value, r, e) == e; + } + + /// + /// This operator allows an implicit cast from AtomicBoolean to int. 
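// Editor's note (not part of the diff): a typical use of the AtomicBoolean
// above as a one-shot guard. CompareAndSet(false, true) succeeds for exactly
// one caller, so Initialize runs at most once even under concurrent Start
// calls. The surrounding service class is purely illustrative.
using dotNetty_kcp.thread;

class OneShotServiceSketch
{
    private readonly AtomicBoolean _started = new AtomicBoolean(false);

    public void Start()
    {
        if (_started.CompareAndSet(false, true))
        {
            Initialize();   // only the winning thread gets here
        }
    }

    private void Initialize() { /* one-time setup */ }
}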
+ /// + public static implicit operator bool(AtomicBoolean value) { + return value.Get(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/AtomicBoolean.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/AtomicBoolean.cs.meta new file mode 100644 index 0000000..fa68b71 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/AtomicBoolean.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 7b24543e75d80614fbfd8471794769c0 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/DistuptorMessageExecutor.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/DistuptorMessageExecutor.cs new file mode 100644 index 0000000..50a4ab3 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/DistuptorMessageExecutor.cs @@ -0,0 +1,44 @@ +using System.Threading.Tasks; + +namespace dotNetty_kcp.thread +{ + /** + * + * 单生产者单消费者 500W tps + * + */ + public class DistuptorMessageExecutor:AbstratcMessageExecutor + { + private RingBuffer _ringBuffer; + + private const int MAX_QUEUE_SIZE = 2 << 10; + + + public override void start() + { + _ringBuffer = new RingBuffer(MAX_QUEUE_SIZE); + base.start(); + } + + + public override bool isFull() + { + return _ringBuffer.Count == MAX_QUEUE_SIZE; + } + + protected override bool isEmpty() + { + return _ringBuffer.Count == 0; + } + + protected override bool TryDequeue(out ITask task) + { + return _ringBuffer.TryDequeue(out task); + } + + protected override bool TryEnqueue(ITask task) + { + return _ringBuffer.tryEnqueue(task); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/DistuptorMessageExecutor.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/DistuptorMessageExecutor.cs.meta new file mode 100644 index 0000000..e94097c --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/DistuptorMessageExecutor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 487b7adf4486e2043b1cf8fc76977259 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/EventLoopScheduleThread.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/EventLoopScheduleThread.cs new file mode 100644 index 0000000..2bb06a2 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/EventLoopScheduleThread.cs @@ -0,0 +1,25 @@ +using System; +using DotNetty.Transport.Channels; + +namespace dotNetty_kcp.thread +{ + public class EventLoopScheduleThread : IScheduleThread + { + private readonly IEventLoop _eventLoop = new SingleThreadEventLoop(); + + + public void schedule(IScheduleTask scheduleTask, TimeSpan timeSpan) + { + _eventLoop.Schedule(scheduleTask, timeSpan); + } + + public void stop() + { + if (_eventLoop.IsShuttingDown) + { + return; + } + _eventLoop.ShutdownGracefullyAsync(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/EventLoopScheduleThread.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/EventLoopScheduleThread.cs.meta new file mode 100644 index 0000000..f115c48 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/EventLoopScheduleThread.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 6cd96ff2a4d0131408ae9ef1770fa21f +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + 
icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/ExecutorPool.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/ExecutorPool.cs new file mode 100644 index 0000000..90441b1 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/ExecutorPool.cs @@ -0,0 +1,40 @@ +using System.Collections.Generic; +using System.Threading; +using base_kcp; + +namespace dotNetty_kcp.thread +{ + public class ExecutorPool:IExecutorPool + { + private List _messageExecutors = new List(); + + private int atomicIndex; + + public IMessageExecutor CreateMessageExecutor() + { + IMessageExecutor executor = new ThreadMessageExecutor(); + executor.start(); + _messageExecutors.Add(executor); + return executor; + } + + public void stop(bool stopImmediately) + { + foreach (var messageExecutor in _messageExecutors) + { + messageExecutor.stop(stopImmediately); + + } + } + + public IMessageExecutor GetAutoMessageExecutor() + { + Interlocked.Increment(ref atomicIndex); + return _messageExecutors[atomicIndex % _messageExecutors.Count]; + } + + public void scheduleTask(IScheduleTask scheduleTask) + { + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/ExecutorPool.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/ExecutorPool.cs.meta new file mode 100644 index 0000000..5c8c8e5 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/ExecutorPool.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: cac5ea355f4c12140b30e2612e7718cb +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/HashedWheelScheduleThread.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/HashedWheelScheduleThread.cs new file mode 100644 index 0000000..5f771bb --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/HashedWheelScheduleThread.cs @@ -0,0 +1,25 @@ +using System; +using DotNetty.Common.Utilities; + +namespace dotNetty_kcp.thread +{ + /** + * netty的实现轮实现,在unity环境下测试会导致cpu跑到50% + * 服务器端使用不错 + */ + public class HashedWheelScheduleThread:IScheduleThread + { + + private readonly HashedWheelTimer _hashedWheelTimer = new HashedWheelTimer(TimeSpan.FromMilliseconds(1),512,-1 ); + + public void schedule(IScheduleTask scheduleTask,TimeSpan timeSpan) + { + _hashedWheelTimer.NewTimeout(scheduleTask,timeSpan); + } + + public void stop() + { + _hashedWheelTimer.StopAsync(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/HashedWheelScheduleThread.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/HashedWheelScheduleThread.cs.meta new file mode 100644 index 0000000..b63739d --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/HashedWheelScheduleThread.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 740eb2cd7b50c2c4999d641243ecdc11 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/IExecutorPool.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/IExecutorPool.cs new file mode 100644 index 0000000..c903a98 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/IExecutorPool.cs @@ -0,0 +1,17 @@ +using System; + +namespace dotNetty_kcp.thread +{ + public interface IExecutorPool + { + IMessageExecutor CreateMessageExecutor(); + + void stop(bool 
stopImmediately); + + IMessageExecutor GetAutoMessageExecutor(); + + + void scheduleTask(IScheduleTask scheduleTask); + + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/IExecutorPool.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/IExecutorPool.cs.meta new file mode 100644 index 0000000..ea66521 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/IExecutorPool.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: ea8dbe218e66fef4cadc51b52ac60f78 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/IMessageExecutor.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/IMessageExecutor.cs new file mode 100644 index 0000000..9921f5a --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/IMessageExecutor.cs @@ -0,0 +1,34 @@ +using System; + +namespace dotNetty_kcp.thread +{ + public interface IMessageExecutor + { + /** + * 启动消息处理器 + */ + void start(); + + /** + * 停止消息处理器 + * shutdownRightNow false该方法会堵塞当前队列全部执行完再关闭 + */ + void stop(bool stopImmediately); + + /** + * 判断队列是否已经达到上限了 + * @return + */ + bool isFull(); + + /** + * 执行任务 + * 注意: 如果线程等于当前线程 则直接执行 如果非当前线程放进队列 + * + * @param iTask + */ + bool execute(ITask iTask); + + + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/IMessageExecutor.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/IMessageExecutor.cs.meta new file mode 100644 index 0000000..55fe828 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/IMessageExecutor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: d2f3cc8ab992aea4f99c905745fdabda +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/IScheduleThread.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/IScheduleThread.cs new file mode 100644 index 0000000..287dda9 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/IScheduleThread.cs @@ -0,0 +1,13 @@ +using System; +using DotNetty.Common.Utilities; + +namespace dotNetty_kcp.thread +{ + public interface IScheduleThread + { + void schedule(IScheduleTask scheduleTask,TimeSpan timeSpan); + + + void stop(); + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/IScheduleThread.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/IScheduleThread.cs.meta new file mode 100644 index 0000000..1053872 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/IScheduleThread.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 903d45e0ce6dfaa4e84f0096ffdc290b +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/ITask.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/ITask.cs new file mode 100644 index 0000000..fd35bd8 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/ITask.cs @@ -0,0 +1,7 @@ +namespace dotNetty_kcp.thread +{ + public interface ITask + { + void execute(); + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/ITask.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/ITask.cs.meta new file mode 100644 index 0000000..fd53052 --- /dev/null +++ 
b/Runtime/csharp-kcp/dotNetty-kcp/thread/ITask.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 0c6f7935707bed6418ba599e77d5936b +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/MessageExecutorTest.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/MessageExecutorTest.cs new file mode 100644 index 0000000..087480c --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/MessageExecutorTest.cs @@ -0,0 +1,55 @@ +using System; +using dotNetty_kcp.thread; + +namespace base_kcp +{ + public class MessageExecutorTest:ITask + { + private static IMessageExecutor _messageExecutor; + + public int i; + + public static long start = KcpUntils.currentMs(); + + private static int index = 0; + + public MessageExecutorTest(int i) + { + this.i = i; + } + + public static int addIndex; + + public static void en() + { + int i = 0; + while (true) + { + var queueTest = new MessageExecutorTest(i); + if (_messageExecutor.execute(queueTest)) + { + i++; + } + } + } + + + public void execute() + { + long now = KcpUntils.currentMs(); + if (now - start > 1000) + { + Console.WriteLine("i "+(i-index) +"time "+(now-start)); + index = i; + start = now; + } + } + + public static void test() + { + _messageExecutor = new DistuptorMessageExecutor(); + _messageExecutor.start(); + en(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/MessageExecutorTest.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/MessageExecutorTest.cs.meta new file mode 100644 index 0000000..b952073 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/MessageExecutorTest.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: b0ce1704ce4053046a60b6032f5b88f1 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/RingBuffer.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/RingBuffer.cs new file mode 100644 index 0000000..325f767 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/RingBuffer.cs @@ -0,0 +1,316 @@ +using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; +using System.Threading; + +namespace dotNetty_kcp.thread +{ + public class RingBuffer + { + private readonly T[] _entries; + private readonly int _modMask; + private Volatile.PaddedLong _consumerCursor = new Volatile.PaddedLong(); + private Volatile.PaddedLong _producerCursor = new Volatile.PaddedLong(); + + /// + /// Creates a new RingBuffer with the given capacity + /// + /// The capacity of the buffer + /// Only a single thread may attempt to consume at any one time + public RingBuffer(int capacity) + { + capacity = NextPowerOfTwo(capacity); + _modMask = capacity - 1; + _entries = new T[capacity]; + } + + /// + /// The maximum number of items that can be stored + /// + public int Capacity + { + get { return _entries.Length; } + } + + public T this[long index] + { + get + { + unchecked + { + return _entries[index & _modMask]; + } + } + set + { + unchecked + { + _entries[index & _modMask] = value; + } + } + } + + /// + /// Removes an item from the buffer. 
+ /// + /// The next available item + public T Dequeue() + { + var next = _consumerCursor.ReadAcquireFence() + 1; + while (_producerCursor.ReadAcquireFence() < next + ) // makes sure we read the data from _entries after we have read the producer cursor + { + Thread.SpinWait(1); + } + + var result = this[next]; + _consumerCursor + .WriteReleaseFence( + next); // makes sure we read the data from _entries before we update the consumer cursor + return result; + } + + /// + /// Attempts to remove an items from the queue + /// + /// the items + /// True if successful + public bool TryDequeue(out T obj) + { + var next = _consumerCursor.ReadAcquireFence() + 1; + + if (_producerCursor.ReadAcquireFence() < next) + { + obj = default(T); + return false; + } + + obj = Dequeue(); + return true; + } + + /// + /// Add an item to the buffer + /// + /// + public void Enqueue(T item) + { + var next = _producerCursor.ReadAcquireFence() + 1; + + long wrapPoint = next - _entries.Length; + long min = _consumerCursor.ReadAcquireFence(); + + while (wrapPoint > min) + { + min = _consumerCursor.ReadAcquireFence(); + Thread.SpinWait(1); + } + + this[next] = item; + _producerCursor + .WriteReleaseFence( + next); // makes sure we write the data in _entries before we update the producer cursor + } + + /// + /// Add an item to the buffer + /// + /// + public bool tryEnqueue(T item) + { + var next = _producerCursor.ReadAcquireFence() + 1; + + long wrapPoint = next - _entries.Length; + long min = _consumerCursor.ReadAcquireFence(); + + if (wrapPoint>min) + { + return false; + } + + this[next] = item; + _producerCursor + .WriteReleaseFence( + next); // makes sure we write the data in _entries before we update the producer cursor + return true; + } + + + /// + /// The number of items in the buffer + /// + /// for indicative purposes only, may contain stale data + public int Count + { + get { return (int) (_producerCursor.ReadFullFence() - _consumerCursor.ReadFullFence()); } + } + + private static int NextPowerOfTwo(int x) + { + var result = 2; + while (result < x) + { + result <<= 1; + } + + return result; + } + } + + public static class Volatile + { + private const int CacheLineSize = 64; + + [StructLayout(LayoutKind.Explicit, Size = CacheLineSize * 2)] + public struct PaddedLong + { + [FieldOffset(CacheLineSize)] private long _value; + + /// + /// Create a new with the given initial value. 
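// Editor's note (not part of the diff): a minimal single-producer /
// single-consumer use of the RingBuffer above. Its generic parameter is not
// visible in this collapsed diff, so RingBuffer<T> with an unconstrained T is
// assumed here. Enqueue spins while the buffer is full, TryDequeue returns
// false while it is empty, and only one thread may consume at a time.
using System;
using System.Threading.Tasks;
using dotNetty_kcp.thread;

static class RingBufferUsageSketch
{
    static void Main()
    {
        var ring = new RingBuffer<string>(1024);

        var producer = Task.Run(() =>
        {
            for (int i = 0; i < 10_000; i++)
                ring.Enqueue("msg-" + i);          // spins while the ring is full
        });

        int received = 0;
        while (received < 10_000)
        {
            if (ring.TryDequeue(out string msg))   // false when currently empty
                received++;
        }

        producer.Wait();
        Console.WriteLine(received);   // 10000
    }
}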
+ /// + /// Initial value + public PaddedLong(long value) + { + _value = value; + } + + /// + /// Read the value without applying any fence + /// + /// The current value + public long ReadUnfenced() + { + return _value; + } + + /// + /// Read the value applying acquire fence semantic + /// + /// The current value + public long ReadAcquireFence() + { + var value = _value; + Thread.MemoryBarrier(); + return value; + } + + /// + /// Read the value applying full fence semantic + /// + /// The current value + public long ReadFullFence() + { + Thread.MemoryBarrier(); + return _value; + } + + /// + /// Read the value applying a compiler only fence, no CPU fence is applied + /// + /// The current value + [MethodImpl(MethodImplOptions.NoOptimization)] + public long ReadCompilerOnlyFence() + { + return _value; + } + + /// + /// Write the value applying release fence semantic + /// + /// The new value + public void WriteReleaseFence(long newValue) + { + Thread.MemoryBarrier(); + _value = newValue; + } + + /// + /// Write the value applying full fence semantic + /// + /// The new value + public void WriteFullFence(long newValue) + { + Thread.MemoryBarrier(); + _value = newValue; + } + + /// + /// Write the value applying a compiler fence only, no CPU fence is applied + /// + /// The new value + [MethodImpl(MethodImplOptions.NoOptimization)] + public void WriteCompilerOnlyFence(long newValue) + { + _value = newValue; + } + + /// + /// Write without applying any fence + /// + /// The new value + public void WriteUnfenced(long newValue) + { + _value = newValue; + } + + /// + /// Atomically set the value to the given updated value if the current value equals the comparand + /// + /// The new value + /// The comparand (expected value) + /// + public bool AtomicCompareExchange(long newValue, long comparand) + { + return Interlocked.CompareExchange(ref _value, newValue, comparand) == comparand; + } + + /// + /// Atomically set the value to the given updated value + /// + /// The new value + /// The original value + public long AtomicExchange(long newValue) + { + return Interlocked.Exchange(ref _value, newValue); + } + + /// + /// Atomically add the given value to the current value and return the sum + /// + /// The value to be added + /// The sum of the current value and the given value + public long AtomicAddAndGet(long delta) + { + return Interlocked.Add(ref _value, delta); + } + + /// + /// Atomically increment the current value and return the new value + /// + /// The incremented value. + public long AtomicIncrementAndGet() + { + return Interlocked.Increment(ref _value); + } + + /// + /// Atomically increment the current value and return the new value + /// + /// The decremented value. + public long AtomicDecrementAndGet() + { + return Interlocked.Decrement(ref _value); + } + + /// + /// Returns the string representation of the current value. + /// + /// the string representation of the current value. 
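// Editor's note (not part of the diff): the publish/consume idiom the fences
// above are meant for, using the PaddedLong struct from this file. The writer
// fills a slot and then releases the cursor (WriteReleaseFence); the reader
// acquires the cursor first (ReadAcquireFence) and only then reads the slot,
// so it cannot observe the cursor advance without the data. The type name is
// fully qualified because the nested class is itself called Volatile.
class CursorPublishSketch
{
    private readonly long[] _slots = new long[16];
    private dotNetty_kcp.thread.Volatile.PaddedLong _published =
        new dotNetty_kcp.thread.Volatile.PaddedLong(-1);

    public void Publish(int slot, long value)
    {
        _slots[slot] = value;                 // 1. write the data
        _published.WriteReleaseFence(slot);   // 2. then release the cursor
    }

    public bool TryRead(out long value)
    {
        long cursor = _published.ReadAcquireFence();   // 1. acquire the cursor
        if (cursor < 0) { value = 0; return false; }
        value = _slots[cursor];                        // 2. then read the data
        return true;
    }
}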
+ public override string ToString() + { + var value = ReadFullFence(); + return value.ToString(); + } + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/RingBuffer.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/RingBuffer.cs.meta new file mode 100644 index 0000000..e026902 --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/RingBuffer.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: f9e7ef0521ce2e448ab17ea8fcb3cce8 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/ThreadMessageExecutor.cs b/Runtime/csharp-kcp/dotNetty-kcp/thread/ThreadMessageExecutor.cs new file mode 100644 index 0000000..8505a1d --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/ThreadMessageExecutor.cs @@ -0,0 +1,56 @@ +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using base_kcp; +using dotNetty_kcp.thread; + +namespace dotNetty_kcp.thread +{ + /** + * 多生产者单消费者 560万 tps + */ + public class ThreadMessageExecutor:AbstratcMessageExecutor + { + + private MpscArrayQueue _queue; + + private const int MAX_QUEUE_SIZE = 2 << 10; + + + /** + * 启动消息处理器 + */ + public override void start() + { + _queue = new MpscArrayQueue(MAX_QUEUE_SIZE); + base.start(); + } + + + /** + * 判断队列是否已经达到上限了 + * @return + */ + public override bool isFull() + { + return _queue.Count == MAX_QUEUE_SIZE; + } + + protected override bool isEmpty() + { + + return _queue.IsEmpty; + } + + protected override bool TryDequeue(out ITask task) + { + return _queue.TryDequeue(out task); + } + + protected override bool TryEnqueue(ITask task) + { + return _queue.TryEnqueue(task); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/dotNetty-kcp/thread/ThreadMessageExecutor.cs.meta b/Runtime/csharp-kcp/dotNetty-kcp/thread/ThreadMessageExecutor.cs.meta new file mode 100644 index 0000000..7cfc47b --- /dev/null +++ b/Runtime/csharp-kcp/dotNetty-kcp/thread/ThreadMessageExecutor.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 9bd7eddf31948f34888c3d637e406b9c +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp.meta b/Runtime/csharp-kcp/reedsolomon_csharp.meta new file mode 100644 index 0000000..640479f --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp.meta @@ -0,0 +1,8 @@ +fileFormatVersion: 2 +guid: 9c9678524608e4f2dbd8bc338cfe6f29 +folderAsset: yes +DefaultImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoop.cs b/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoop.cs new file mode 100644 index 0000000..057f317 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoop.cs @@ -0,0 +1,69 @@ +namespace fec +{ + public interface CodingLoop + { + + + + /** + * Multiplies a subset of rows from a coding matrix by a full set of + * input shards to produce some output shards. + * + * @param matrixRows The rows from the matrix to use. + * @param inputs An array of byte arrays, each of which is one input shard. + * The inputs array may have extra buffers after the ones + * that are used. They will be ignored. 
The number of + * inputs used is determined by the length of the + * each matrix row. + * @param inputCount The number of input byte arrays. + * @param outputs Byte arrays where the computed shards are stored. The + * outputs array may also have extra, unused, elements + * at the end. The number of outputs computed, and the + * number of matrix rows used, is determined by + * outputCount. + * @param outputCount The number of outputs to compute. + * @param offset The index in the inputs and output of the first byte + * to process. + * @param byteCount The number of bytes to process. + */ + void codeSomeShards(byte[][] matrixRows, + byte[][] inputs, + int inputCount, + byte[][] outputs, + int outputCount, + int offset, + int byteCount); + + /** + * Multiplies a subset of rows from a coding matrix by a full set of + * input shards to produce some output shards, and checks that the + * the data is those shards matches what's expected. + * + * @param matrixRows The rows from the matrix to use. + * @param inputs An array of byte arrays, each of which is one input shard. + * The inputs array may have extra buffers after the ones + * that are used. They will be ignored. The number of + * inputs used is determined by the length of the + * each matrix row. + * @param inputCount THe number of input byte arrays. + * @param toCheck Byte arrays where the computed shards are stored. The + * outputs array may also have extra, unused, elements + * at the end. The number of outputs computed, and the + * number of matrix rows used, is determined by + * outputCount. + * @param checkCount The number of outputs to compute. + * @param offset The index in the inputs and output of the first byte + * to process. + * @param byteCount The number of bytes to process. + * @param tempBuffer A place to store temporary results. May be null. + */ + bool checkSomeShards(byte[][] matrixRows, + byte[][] inputs, + int inputCount, + byte[][] toCheck, + int checkCount, + int offset, + int byteCount, + byte[] tempBuffer); + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoop.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoop.cs.meta new file mode 100644 index 0000000..6f13d22 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoop.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 7a2cd17a3ffad422386b65478ffecd7b +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoopBase.cs b/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoopBase.cs new file mode 100644 index 0000000..7d3dea0 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoopBase.cs @@ -0,0 +1,41 @@ +namespace fec +{ + public abstract class CodingLoopBase : CodingLoop + { + public abstract void codeSomeShards(byte[][] matrixRows, byte[][] inputs, int inputCount, byte[][] outputs, int outputCount, + int offset, int byteCount); + + public virtual bool checkSomeShards( + byte[][] matrixRows, + byte[][] inputs, int inputCount, + byte[][] toCheck, int checkCount, + int offset, int byteCount, + byte[] tempBuffer) + { + // This is the loop structure for ByteOutputInput, which does not + // require temporary buffers for checking. 
+ byte[][] table = Galois.MULTIPLICATION_TABLE; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + for (int iOutput = 0; iOutput < checkCount; iOutput++) + { + byte[] matrixRow = matrixRows[iOutput]; + var value = 0; + for (int iInput = 0; iInput < inputCount; iInput++) + { + value ^= table[matrixRow[iInput] & 0xFF][inputs[iInput][iByte] & 0xFF]; + } + + if (toCheck[iOutput][iByte] != value) + { + return false; + } + } + } + + return true; + } + + + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoopBase.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoopBase.cs.meta new file mode 100644 index 0000000..2b2d435 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/CodingLoopBase.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 00f2884c68e1a463c9fe7778b95a016d +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/Galois.cs b/Runtime/csharp-kcp/reedsolomon_csharp/Galois.cs new file mode 100644 index 0000000..9b895d1 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/Galois.cs @@ -0,0 +1,337 @@ +using System; +using System.Collections.Generic; + +namespace fec +{ + public sealed class Galois + { + /** + * The number of elements in the field. + */ + + public const int FIELD_SIZE = 256; + + /** + * The polynomial used to generate the logarithm table. + * + * There are a number of polynomials that work to generate + * a Galois field of 256 elements. The choice is arbitrary, + * and we just use the first one. + * + * The possibilities are: 29, 43, 45, 77, 95, 99, 101, 105, + * 113, 135, 141, 169, 195, 207, 231, and 245. 
+ */ + + public const int GENERATING_POLYNOMIAL = 29; + + + public static readonly short[] LOG_TABLE = new short[] + { + -1, 0, 1, 25, 2, 50, 26, 198, + 3, 223, 51, 238, 27, 104, 199, 75, + 4, 100, 224, 14, 52, 141, 239, 129, + 28, 193, 105, 248, 200, 8, 76, 113, + 5, 138, 101, 47, 225, 36, 15, 33, + 53, 147, 142, 218, 240, 18, 130, 69, + 29, 181, 194, 125, 106, 39, 249, 185, + 201, 154, 9, 120, 77, 228, 114, 166, + 6, 191, 139, 98, 102, 221, 48, 253, + 226, 152, 37, 179, 16, 145, 34, 136, + 54, 208, 148, 206, 143, 150, 219, 189, + 241, 210, 19, 92, 131, 56, 70, 64, + 30, 66, 182, 163, 195, 72, 126, 110, + 107, 58, 40, 84, 250, 133, 186, 61, + 202, 94, 155, 159, 10, 21, 121, 43, + 78, 212, 229, 172, 115, 243, 167, 87, + 7, 112, 192, 247, 140, 128, 99, 13, + 103, 74, 222, 237, 49, 197, 254, 24, + 227, 165, 153, 119, 38, 184, 180, 124, + 17, 68, 146, 217, 35, 32, 137, 46, + 55, 63, 209, 91, 149, 188, 207, 205, + 144, 135, 151, 178, 220, 252, 190, 97, + 242, 86, 211, 171, 20, 42, 93, 158, + 132, 60, 57, 83, 71, 109, 65, 162, + 31, 45, 67, 216, 183, 123, 164, 118, + 196, 23, 73, 236, 127, 12, 111, 246, + 108, 161, 59, 82, 41, 157, 85, 170, + 251, 96, 134, 177, 187, 204, 62, 90, + 203, 89, 95, 176, 156, 169, 160, 81, + 11, 245, 22, 235, 122, 117, 44, 215, + 79, 174, 213, 233, 230, 231, 173, 232, + 116, 214, 244, 234, 168, 80, 88, 175 + }; + + + + public static readonly sbyte[] EXP_TABLE = new sbyte[] + { + 1, 2, 4, 8, 16, 32, 64, -128, + 29, 58, 116, -24, -51, -121, 19, 38, + 76, -104, 45, 90, -76, 117, -22, -55, + -113, 3, 6, 12, 24, 48, 96, -64, + -99, 39, 78, -100, 37, 74, -108, 53, + 106, -44, -75, 119, -18, -63, -97, 35, + 70, -116, 5, 10, 20, 40, 80, -96, + 93, -70, 105, -46, -71, 111, -34, -95, + 95, -66, 97, -62, -103, 47, 94, -68, + 101, -54, -119, 15, 30, 60, 120, -16, + -3, -25, -45, -69, 107, -42, -79, 127, + -2, -31, -33, -93, 91, -74, 113, -30, + -39, -81, 67, -122, 17, 34, 68, -120, + 13, 26, 52, 104, -48, -67, 103, -50, + -127, 31, 62, 124, -8, -19, -57, -109, + 59, 118, -20, -59, -105, 51, 102, -52, + -123, 23, 46, 92, -72, 109, -38, -87, + 79, -98, 33, 66, -124, 21, 42, 84, + -88, 77, -102, 41, 82, -92, 85, -86, + 73, -110, 57, 114, -28, -43, -73, 115, + -26, -47, -65, 99, -58, -111, 63, 126, + -4, -27, -41, -77, 123, -10, -15, -1, + -29, -37, -85, 75, -106, 49, 98, -60, + -107, 55, 110, -36, -91, 87, -82, 65, + -126, 25, 50, 100, -56, -115, 7, 14, + 28, 56, 112, -32, -35, -89, 83, -90, + 81, -94, 89, -78, 121, -14, -7, -17, + -61, -101, 43, 86, -84, 69, -118, 9, + 18, 36, 72, -112, 61, 122, -12, -11, + -9, -13, -5, -21, -53, -117, 11, 22, + 44, 88, -80, 125, -6, -23, -49, -125, + 27, 54, 108, -40, -83, 71, -114, + // Repeat the table a second time, so multiply() + // does not have to check bounds. 
+ 1, 2, 4, 8, 16, 32, 64, -128, + 29, 58, 116, -24, -51, -121, 19, 38, + 76, -104, 45, 90, -76, 117, -22, -55, + -113, 3, 6, 12, 24, 48, 96, -64, + -99, 39, 78, -100, 37, 74, -108, 53, + 106, -44, -75, 119, -18, -63, -97, 35, + 70, -116, 5, 10, 20, 40, 80, -96, + 93, -70, 105, -46, -71, 111, -34, -95, + 95, -66, 97, -62, -103, 47, 94, -68, + 101, -54, -119, 15, 30, 60, 120, -16, + -3, -25, -45, -69, 107, -42, -79, 127, + -2, -31, -33, -93, 91, -74, 113, -30, + -39, -81, 67, -122, 17, 34, 68, -120, + 13, 26, 52, 104, -48, -67, 103, -50, + -127, 31, 62, 124, -8, -19, -57, -109, + 59, 118, -20, -59, -105, 51, 102, -52, + -123, 23, 46, 92, -72, 109, -38, -87, + 79, -98, 33, 66, -124, 21, 42, 84, + -88, 77, -102, 41, 82, -92, 85, -86, + 73, -110, 57, 114, -28, -43, -73, 115, + -26, -47, -65, 99, -58, -111, 63, 126, + -4, -27, -41, -77, 123, -10, -15, -1, + -29, -37, -85, 75, -106, 49, 98, -60, + -107, 55, 110, -36, -91, 87, -82, 65, + -126, 25, 50, 100, -56, -115, 7, 14, + 28, 56, 112, -32, -35, -89, 83, -90, + 81, -94, 89, -78, 121, -14, -7, -17, + -61, -101, 43, 86, -84, 69, -118, 9, + 18, 36, 72, -112, 61, 122, -12, -11, + -9, -13, -5, -21, -53, -117, 11, 22, + 44, 88, -80, 125, -6, -23, -49, -125, + 27, 54, 108, -40, -83, 71, -114 + }; + + + public static readonly byte[] EXP_TABLE_BYTE = generateExpTable(); + + /** + * A multiplication table for the Galois field. + * + * Using this table is an alternative to using the multiply() method, + * which uses log/exp table lookups. + */ + public static readonly byte[][] MULTIPLICATION_TABLE= generateMultiplicationTable(); + + + /** + * Adds two elements of the field. If you're in an inner loop, + * you should inline this function: it's just XOR. + */ + public static sbyte add(sbyte a, sbyte b) + { + return (sbyte) (a ^ b); + } + + /** + * Inverse of addition. If you're in an inner loop, + * you should inline this function: it's just XOR. + */ + public static byte subtract(sbyte a, sbyte b) + { + return (byte) (a ^ b); + } + + /** + * Multiplies two elements of the field. + */ + public static byte multiply(byte a, byte b) + { + if (a == 0 || b == 0) + { + return 0; + } + + int logA = LOG_TABLE[a]; + int logB = LOG_TABLE[b]; + int logResult = logA + logB; + return EXP_TABLE_BYTE[logResult]; + } + + /** + * Inverse of multiplication. + */ + public static byte divide(byte a, byte b) + { + if (a == 0) + { + return 0; + } + + if (b == 0) + { + throw new Exception("Argument 'divisor' is 0"); + } + + int logA = LOG_TABLE[a]; + int logB = LOG_TABLE[b]; + int logResult = logA - logB; + if (logResult < 0) + { + logResult += 255; + } + + return EXP_TABLE_BYTE[logResult]; + } + + /** + * Computes a**n. + * + * The result will be the same as multiplying a times itself n times. + * + * @param a A member of the field. + * @param n A plain-old integer. + * @return The result of multiplying a by itself n times. + */ + public static byte exp(byte a, int n) + { + if (n == 0) + { + return 1; + } + + if (a == 0) + { + return 0; + } + + int logA = LOG_TABLE[a]; + int logResult = logA * n; + while (255 <= logResult) + { + logResult -= 255; + } + + return EXP_TABLE_BYTE[logResult]; + } + + /** + * Generates a logarithm table given a starting polynomial. 
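// Editor's note (not part of the diff): a quick sanity check of the GF(256)
// helpers above, usable as a scratch test. multiply and divide are inverse
// operations, addition is XOR, and exp(a, n) equals multiplying a by itself
// n times; all of this follows from the log/exp tables keyed to the
// generating polynomial 29.
using System;
using fec;

static class GaloisSanityCheck
{
    static void Main()
    {
        byte a = 0x57, b = 0x83;

        byte product = Galois.multiply(a, b);
        Console.WriteLine(Galois.divide(product, b) == a);    // True: divide undoes multiply

        // exp(a, 3) == a * a * a
        byte cubed = Galois.multiply(Galois.multiply(a, a), a);
        Console.WriteLine(Galois.exp(a, 3) == cubed);          // True

        // Addition in GF(2^8) is XOR, so every element is its own additive inverse.
        Console.WriteLine((byte)(a ^ a));                      // 0
    }
}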
+ */ + public static short[] generateLogTable(int polynomial) + { + short[] result = new short[FIELD_SIZE]; + for (int i = 0; i < FIELD_SIZE; i++) + { + result[i] = -1; // -1 means "not set" + } + + int b = 1; + for (int log = 0; log < FIELD_SIZE - 1; log++) + { + if (result[b] != -1) + { + throw new Exception("BUG: duplicate logarithm (bad polynomial?)"); + } + + result[b] = (short) log; + b = (b << 1); + if (FIELD_SIZE <= b) + { + b = ((b - FIELD_SIZE) ^ polynomial); + } + } + + return result; + } + + /** + * Generates the inverse log table. + */ + public static byte[] generateExpTable(short[] logTable) + { + byte[] result = new byte [FIELD_SIZE * 2 - 2]; + for (int i = 1; i < FIELD_SIZE; i++) + { + int log = logTable[i]; + result[log] = (byte) i; + result[log + FIELD_SIZE - 1] = (byte) i; + } + + return result; + } + + + public static byte[] generateExpTable() + { + byte[] result = new byte[EXP_TABLE.Length]; + for (var i = 0; i < EXP_TABLE.Length; i++) + { + result[i] = (byte) EXP_TABLE[i]; + } + return result; + } + + + public static byte[][] generateMultiplicationTable() + { + byte[][] result = new byte [256][]; + for (int a = 0; a < FIELD_SIZE; a++) + { + result[a] = new byte[256]; + for (int b = 0; b < FIELD_SIZE; b++) + { + result[a][b] = multiply((byte) a, (byte) b); + } + } + + return result; + } + + /** + * Returns a list of all polynomials that can be used to generate + * the field. + * + * This is never used in the code; it's just here for completeness. + */ + public static int[] allPossiblePolynomials() + { + List result = new List(); + for (int i = 0; i < FIELD_SIZE; i++) + { + try + { + generateLogTable(i); + result.Add(i); + } + catch (Exception e) + { + // this one didn't work + } + } + + return result.ToArray(); + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/Galois.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/Galois.cs.meta new file mode 100644 index 0000000..3a4a72e --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/Galois.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: bb5dd080f42e54590a30e1ffe4f6c500 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/InputOutputByteTableCodingLoop.cs b/Runtime/csharp-kcp/reedsolomon_csharp/InputOutputByteTableCodingLoop.cs new file mode 100644 index 0000000..b48e7a5 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/InputOutputByteTableCodingLoop.cs @@ -0,0 +1,94 @@ +namespace fec +{ + public class InputOutputByteTableCodingLoop : CodingLoopBase + { + public override void codeSomeShards(byte[][] matrixRows, byte[][] inputs, int inputCount, byte[][] outputs, + int outputCount, + int offset, int byteCount) + { + byte[][] table = Galois.MULTIPLICATION_TABLE; + { + int iInput = 0; + byte[] inputShard = inputs[iInput]; + for (int iOutput = 0; iOutput < outputCount; iOutput++) + { + byte[] outputShard = outputs[iOutput]; + byte[] matrixRow = matrixRows[iOutput]; + byte[] multTableRow = table[matrixRow[iInput] ]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + outputShard[iByte] = multTableRow[inputShard[iByte]]; + } + } + } + + for (int iInput = 1; iInput < inputCount; iInput++) + { + byte[] inputShard = inputs[iInput]; + for (int iOutput = 0; iOutput < outputCount; iOutput++) + { + byte[] outputShard = outputs[iOutput]; + byte[] matrixRow = matrixRows[iOutput]; + byte[] 
multTableRow = table[matrixRow[iInput]]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + outputShard[iByte] ^= multTableRow[inputShard[iByte]]; + } + } + } + } + + public override bool checkSomeShards( + byte[][] matrixRows, + byte[][] inputs, int inputCount, + byte[][] toCheck, int checkCount, + int offset, int byteCount, + byte[] tempBuffer) + { + if (tempBuffer == null) + { + return base.checkSomeShards(matrixRows, inputs, inputCount, toCheck, checkCount, offset, byteCount, + null); + } + + // This is actually the code from OutputInputByteTableCodingLoop. + // Using the loops from this class would require multiple temp + // buffers. + + byte[][] table = Galois.MULTIPLICATION_TABLE; + for (int iOutput = 0; iOutput < checkCount; iOutput++) + { + byte[] outputShard = toCheck[iOutput]; + byte[] matrixRow = matrixRows[iOutput]; + { + int iInput = 0; + byte[] inputShard = inputs[iInput]; + byte[] multTableRow = table[matrixRow[iInput]]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + tempBuffer[iByte] = multTableRow[inputShard[iByte] ]; + } + } + for (int iInput = 1; iInput < inputCount; iInput++) + { + byte[] inputShard = inputs[iInput]; + byte[] multTableRow = table[matrixRow[iInput]]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + tempBuffer[iByte] ^= multTableRow[inputShard[iByte]]; + } + } + + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + if (tempBuffer[iByte] != outputShard[iByte]) + { + return false; + } + } + } + + return true; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/InputOutputByteTableCodingLoop.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/InputOutputByteTableCodingLoop.cs.meta new file mode 100644 index 0000000..f68d600 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/InputOutputByteTableCodingLoop.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 42c689a70eb544a5595da1488bf5e1ad +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/Matrix.cs b/Runtime/csharp-kcp/reedsolomon_csharp/Matrix.cs new file mode 100644 index 0000000..da6c0c8 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/Matrix.cs @@ -0,0 +1,336 @@ +using System; +using System.Linq; +using System.Text; + +namespace fec +{ + public class Matrix + { + /** + * The number of rows in the matrix. + */ + private readonly int rows; + + /** + * The number of columns in the matrix. + */ + private readonly int columns; + + /** + * The data in the matrix, in row major form. + * + * To get element (r, c): data[r][c] + * + * Because this this is computer science, and not math, + * the indices for both the row and column start at 0. + */ + private readonly byte [] [] data; + + /** + * Initialize a matrix of zeros. + * + * @param initRows The number of rows in the matrix. + * @param initColumns The number of columns in the matrix. + */ + public Matrix(int initRows, int initColumns) { + rows = initRows; + columns = initColumns; + data = new byte [rows] []; + for (int r = 0; r < rows; r++) { + data[r] = new byte [columns]; + } + } + + /** + * Initializes a matrix with the given row-major data. 
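// Editor's note (not part of the diff): what one row of codeSomeShards above
// computes, written out for a single parity shard. Each output byte is the
// GF(256) dot product of the matrix row with the column of input bytes, using
// the same MULTIPLICATION_TABLE lookups as the coding-loop classes. The data
// and coefficients below are arbitrary example values.
using System;
using fec;

static class SingleParityRowSketch
{
    static void Main()
    {
        byte[][] inputs =
        {
            new byte[] { 1, 2, 3, 4 },
            new byte[] { 5, 6, 7, 8 },
        };
        byte[] matrixRow = { 0x02, 0x03 };   // coefficients for this parity row
        byte[] parity = new byte[4];

        byte[][] table = Galois.MULTIPLICATION_TABLE;
        for (int iByte = 0; iByte < parity.Length; iByte++)
        {
            int value = 0;
            for (int iInput = 0; iInput < inputs.Length; iInput++)
            {
                value ^= table[matrixRow[iInput]][inputs[iInput][iByte]];
            }
            parity[iByte] = (byte)value;
        }

        Console.WriteLine(BitConverter.ToString(parity));
    }
}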
+ */ + public Matrix(byte [] [] initData) { + rows = initData.Length; + columns = initData[0].Length; + data = new byte [rows] []; + for (int r = 0; r < rows; r++) { + if (initData[r].Length != columns) { + throw new Exception("Not all rows have the same number of columns"); + } + data[r] = new byte[columns]; + for (int c = 0; c < columns; c++) { + data[r][c] = initData[r][c]; + } + } + } + + /** + * Returns an identity matrix of the given size. + */ + public static Matrix identity(int size) { + Matrix result = new Matrix(size, size); + for (int i = 0; i < size; i++) { + result.set(i, i, 1); + } + return result; + } + + /** + * Returns a human-readable string of the matrix contents. + * + * Example: [[1, 2], [3, 4]] + */ + public string toString() { + StringBuilder result = new StringBuilder(); + result.Append('['); + for (int r = 0; r < rows; r++) { + if (r != 0) { + result.Append(", "); + } + result.Append('['); + for (int c = 0; c < columns; c++) { + if (c != 0) { + result.Append(", "); + } + result.Append(data[r][c] & 0xFF); + } + result.Append(']'); + } + result.Append(']'); + return result.ToString(); + } + + /** + * Returns a human-readable string of the matrix contents. + * + * Example: + * 00 01 02 + * 03 04 05 + * 06 07 08 + * 09 0a 0b + */ + public String toBigString() { + StringBuilder result = new StringBuilder(); + for (int r = 0; r < rows; r++) { + for (int c = 0; c < columns; c++) { + int value = get(r, c); + if (value < 0) { + value += 256; + } + result.Append(String.Format("%02x ", value)); + } + result.Append("\n"); + } + return result.ToString(); + } + + /** + * Returns the number of columns in this matrix. + */ + public int getColumns() { + return columns; + } + + /** + * Returns the number of rows in this matrix. + */ + public int getRows() { + return rows; + } + + /** + * Returns the value at row r, column c. + */ + public byte get(int r, int c) { + if (r < 0 || rows <= r) { + throw new Exception("Row index out of range: " + r); + } + if (c < 0 || columns <= c) { + throw new Exception("Column index out of range: " + c); + } + return data[r][c]; + } + + /** + * Sets the value at row r, column c. + */ + public void set(int r, int c, byte value) { + if (r < 0 || rows <= r) { + throw new Exception("Row index out of range: " + r); + } + if (c < 0 || columns <= c) { + throw new Exception("Column index out of range: " + c); + } + data[r][c] = value; + } + + /** + * Returns true iff this matrix is identical to the other. + */ + public bool equals(Object other) { + if (!(other is Matrix)) { + return false; + } + for (int r = 0; r < rows; r++) { + if (!Object.Equals(data[r], ((Matrix)other).data[r])) { + return false; + } + } + return true; + } + + /** + * Multiplies this matrix (the one on the left) by another + * matrix (the one on the right). + */ + public Matrix times(Matrix right) { + if (getColumns() != right.getRows()) { + throw new Exception( + "Columns on left (" + getColumns() +") " + + "is different than rows on right (" + right.getRows() + ")"); + } + Matrix result = new Matrix(getRows(), right.getColumns()); + for (int r = 0; r < getRows(); r++) { + for (int c = 0; c < right.getColumns(); c++) { + byte value = 0; + for (int i = 0; i < getColumns(); i++) { + value ^= Galois.multiply(get(r, i), right.get(i, c)); + } + result.set(r, c, value); + } + } + return result; + } + + /** + * Returns the concatenation of this matrix and the matrix on the right. 
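// Editor's note (not part of the diff): a tiny usage check of the Matrix API
// above. Multiplying by the identity leaves a matrix unchanged, because
// times() accumulates Galois.multiply products with XOR and multiplying by 1
// is the identity in GF(256).
using System;
using fec;

static class MatrixIdentityCheck
{
    static void Main()
    {
        var a = new Matrix(new byte[][]
        {
            new byte[] { 1, 2 },
            new byte[] { 3, 4 },
        });

        Matrix product = a.times(Matrix.identity(2));

        for (int r = 0; r < product.getRows(); r++)
            for (int c = 0; c < product.getColumns(); c++)
                Console.WriteLine(product.get(r, c) == a.get(r, c));   // True x4
    }
}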
+ */ + public Matrix augment(Matrix right) { + if (rows != right.rows) { + throw new Exception("Matrices don't have the same number of rows"); + } + Matrix result = new Matrix(rows, columns + right.columns); + for (int r = 0; r < rows; r++) { + for (int c = 0; c < columns; c++) { + result.data[r][c] = data[r][c]; + } + for (int c = 0; c < right.columns; c++) { + result.data[r][columns + c] = right.data[r][c]; + } + } + return result; + } + + /** + * Returns a part of this matrix. + */ + public Matrix submatrix(int rmin, int cmin, int rmax, int cmax) { + Matrix result = new Matrix(rmax - rmin, cmax - cmin); + for (int r = rmin; r < rmax; r++) { + for (int c = cmin; c < cmax; c++) { + result.data[r - rmin][c - cmin] = data[r][c]; + } + } + return result; + } + + /** + * Returns one row of the matrix as a byte array. + */ + public byte [] getRow(int row) { + byte [] result = new byte [columns]; + for (int c = 0; c < columns; c++) { + result[c] = get(row, c); + } + return result; + } + + /** + * Exchanges two rows in the matrix. + */ + public void swapRows(int r1, int r2) { + if (r1 < 0 || rows <= r1 || r2 < 0 || rows <= r2) { + throw new Exception("Row index out of range"); + } + byte [] tmp = data[r1]; + data[r1] = data[r2]; + data[r2] = tmp; + } + + /** + * Returns the inverse of this matrix. + * + * @throws IllegalArgumentException when the matrix is singular and + * doesn't have an inverse. + */ + public Matrix invert() { + // Sanity check. + if (rows != columns) { + throw new Exception("Only square matrices can be inverted"); + } + + // Create a working matrix by augmenting this one with + // an identity matrix on the right. + Matrix work = augment(identity(rows)); + + // Do Gaussian elimination to transform the left half into + // an identity matrix. + work.gaussianElimination(); + + // The right half is now the inverse. + return work.submatrix(0, rows, columns, columns * 2); + } + + /** + * Does the work of matrix inversion. + * + * Assumes that this is an r by 2r matrix. + */ + private void gaussianElimination() { + // Clear out the part below the main diagonal and scale the main + // diagonal to be 1. + for (int r = 0; r < rows; r++) { + // If the element on the diagonal is 0, find a row below + // that has a non-zero and swap them. + if (data[r][r] == 0) { + for (int rowBelow = r + 1; rowBelow < rows; rowBelow++) { + if (data[rowBelow][r] != 0) { + swapRows(r, rowBelow); + break; + } + } + } + // If we couldn't find one, the matrix is singular. + if (data[r][r] == 0) { + throw new Exception("Matrix is singular"); + } + // Scale to 1. + if (data[r][r] != 1) { + byte scale = Galois.divide(1, data[r][r]); + for (int c = 0; c < columns; c++) { + data[r][c] = Galois.multiply(data[r][c], scale); + } + } + // Make everything below the 1 be a 0 by subtracting + // a multiple of it. (Subtraction and addition are + // both exclusive or in the Galois field.) + for (int rowBelow = r + 1; rowBelow < rows; rowBelow++) { + if (data[rowBelow][r] != 0) { + byte scale = data[rowBelow][r]; + for (int c = 0; c < columns; c++) { + data[rowBelow][c] ^= Galois.multiply(scale, data[r][c]); + } + } + } + } + + // Now clear the part above the main diagonal. 
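+ // At this point the left half of the augmented matrix is upper
+ // triangular with 1s on the diagonal. XOR-ing a scaled copy of row d
+ // out of every row above it (addition and subtraction are both XOR in
+ // GF(256)) leaves the identity on the left, so the right half of the
+ // augmented matrix becomes the inverse.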
+ for (int d = 0; d < rows; d++) { + for (int rowAbove = 0; rowAbove < d; rowAbove++) { + if (data[rowAbove][d] != 0) { + byte scale = data[rowAbove][d]; + for (int c = 0; c < columns; c++) { + data[rowAbove][c] ^= Galois.multiply(scale, data[d][c]); + } + + } + } + } + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/Matrix.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/Matrix.cs.meta new file mode 100644 index 0000000..adad060 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/Matrix.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 7053b17e1ffa849619391d251805e988 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/OutputInputByteTableCodingLoop.cs b/Runtime/csharp-kcp/reedsolomon_csharp/OutputInputByteTableCodingLoop.cs new file mode 100644 index 0000000..edb3c69 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/OutputInputByteTableCodingLoop.cs @@ -0,0 +1,93 @@ +/** + * One specific ordering/nesting of the coding loops. + * + * Copyright 2015, Backblaze, Inc. All rights reserved. + */ + +namespace fec +{ + public class OutputInputByteTableCodingLoop : CodingLoopBase + { + public override void codeSomeShards( + byte[][] matrixRows, + byte[][] inputs, int inputCount, + byte[][] outputs, int outputCount, + int offset, int byteCount) + { + byte[][] table = Galois.MULTIPLICATION_TABLE; + for (int iOutput = 0; iOutput < outputCount; iOutput++) + { + byte[] outputShard = outputs[iOutput]; + byte[] matrixRow = matrixRows[iOutput]; + { + int iInput = 0; + byte[] inputShard = inputs[iInput]; + byte[] multTableRow = table[matrixRow[iInput] ]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + outputShard[iByte] = multTableRow[inputShard[iByte]]; + } + } + for (int iInput = 1; iInput < inputCount; iInput++) + { + byte[] inputShard = inputs[iInput]; + byte[] multTableRow = table[matrixRow[iInput] & 0xFF]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + outputShard[iByte] ^= multTableRow[inputShard[iByte]]; + } + } + } + } + + + public override bool checkSomeShards( + byte[][] matrixRows, + byte[][] inputs, int inputCount, + byte[][] toCheck, int checkCount, + int offset, int byteCount, + byte[] tempBuffer) + { + if (tempBuffer == null) + { + return base.checkSomeShards(matrixRows, inputs, inputCount, toCheck, checkCount, offset, byteCount, + null); + } + + byte[][] table = Galois.MULTIPLICATION_TABLE; + for (int iOutput = 0; iOutput < checkCount; iOutput++) + { + byte[] outputShard = toCheck[iOutput]; + byte[] matrixRow = matrixRows[iOutput]; + { + int iInput = 0; + byte[] inputShard = inputs[iInput]; + byte[] multTableRow = table[matrixRow[iInput]]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + tempBuffer[iByte] = multTableRow[inputShard[iByte]]; + } + } + for (int iInput = 1; iInput < inputCount; iInput++) + { + byte[] inputShard = inputs[iInput]; + byte[] multTableRow = table[matrixRow[iInput]]; + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + tempBuffer[iByte] ^= multTableRow[inputShard[iByte]]; + } + } + + for (int iByte = offset; iByte < offset + byteCount; iByte++) + { + if (tempBuffer[iByte] != outputShard[iByte]) + { + return false; + } + } + } + + return true; + } + } +} \ No newline at end of file diff --git 
a/Runtime/csharp-kcp/reedsolomon_csharp/OutputInputByteTableCodingLoop.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/OutputInputByteTableCodingLoop.cs.meta new file mode 100644 index 0000000..099e86e --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/OutputInputByteTableCodingLoop.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: d6326f51b606b47ab9a39c79730295f3 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/Program.cs b/Runtime/csharp-kcp/reedsolomon_csharp/Program.cs new file mode 100644 index 0000000..f46578d --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/Program.cs @@ -0,0 +1,17 @@ +using System; +using System.Runtime.InteropServices; + +namespace fec +{ + class Program + { + static void Main(string[] args) + { + new ReedSolomonBenchmark().run(); +// new ReedSolomonTest().testBigEncodeDecode(); + } + + + + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/Program.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/Program.cs.meta new file mode 100644 index 0000000..3902e6d --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/Program.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: 58d82334dffb640a98f63cbad813e803 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomon.cs b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomon.cs new file mode 100644 index 0000000..faafd88 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomon.cs @@ -0,0 +1,414 @@ +using System; + +namespace fec +{ + public class ReedSolomon + { + private readonly int dataShardCount; + private readonly int parityShardCount; + private readonly int totalShardCount; + private readonly Matrix matrix; + private readonly CodingLoop codingLoop; + + /** + * Rows from the matrix for encoding parity, each one as its own + * byte array to allow for efficient access while encoding. + */ + private readonly byte[][] parityRows; + + /** + * Creates a ReedSolomon codec with the default coding loop. + */ + public static ReedSolomon create(int dataShardCount, int parityShardCount) + { + return new ReedSolomon(dataShardCount, parityShardCount, new InputOutputByteTableCodingLoop()); + } + + /** + * Initializes a new encoder/decoder, with a chosen coding loop. + */ + public ReedSolomon(int dataShardCount, int parityShardCount, CodingLoop codingLoop) + { + // We can have at most 256 shards total, as any more would + // lead to duplicate rows in the Vandermonde matrix, which + // would then lead to duplicate rows in the built matrix + // below. Then any subset of the rows containing the duplicate + // rows would be singular. + if (256 < dataShardCount + parityShardCount) + { + throw new Exception("too many shards - max is 256"); + } + + this.dataShardCount = dataShardCount; + this.parityShardCount = parityShardCount; + this.codingLoop = codingLoop; + this.totalShardCount = dataShardCount + parityShardCount; + matrix = buildMatrix(dataShardCount, this.totalShardCount); + parityRows = new byte [parityShardCount][]; + for (int i = 0; i < parityShardCount; i++) + { + parityRows[i] = matrix.getRow(dataShardCount + i); + } + } + + /** + * Returns the number of data shards. 
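+ *
+ * (These are the first dataShardCount entries of any shard array passed to
+ * encodeParity, isParityCorrect, or decodeMissing; the parity shards follow them.)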
+ */ + public int getDataShardCount() + { + return dataShardCount; + } + + /** + * Returns the number of parity shards. + */ + public int getParityShardCount() + { + return parityShardCount; + } + + /** + * Returns the total number of shards. + */ + public int getTotalShardCount() + { + return totalShardCount; + } + + /** + * Encodes parity for a set of data shards. + * + * @param shards An array containing data shards followed by parity shards. + * Each shard is a byte array, and they must all be the same + * size. + * @param offset The index of the first byte in each shard to encode. + * @param byteCount The number of bytes to encode in each shard. + * + */ + public void encodeParity(byte[][] shards, int offset, int byteCount) + { + // Check arguments. + checkBuffersAndSizes(shards, offset, byteCount); + + // Build the array of output buffers. + byte[][] outputs = new byte [parityShardCount][]; + Array.Copy(shards, dataShardCount, outputs, 0, parityShardCount); + + // Do the coding. + codingLoop.codeSomeShards( + parityRows, + shards, dataShardCount, + outputs, parityShardCount, + offset, byteCount); + } + + + /** + * Returns true if the parity shards contain the right data. + * + * @param shards An array containing data shards followed by parity shards. + * Each shard is a byte array, and they must all be the same + * size. + * @param firstByte The index of the first byte in each shard to check. + * @param byteCount The number of bytes to check in each shard. + */ + public bool isParityCorrect(byte[][] shards, int firstByte, int byteCount) + { + // Check arguments. + checkBuffersAndSizes(shards, firstByte, byteCount); + + // Build the array of buffers being checked. + byte[][] toCheck = new byte [parityShardCount][]; + Array.Copy(shards, dataShardCount, toCheck, 0, parityShardCount); + + + // Do the checking. + return codingLoop.checkSomeShards( + parityRows, + shards, dataShardCount, + toCheck, parityShardCount, + firstByte, byteCount, + null); + } + + /** + * Returns true if the parity shards contain the right data. + * + * This method may be significantly faster than the one above that does + * not use a temporary buffer. + * + * @param shards An array containing data shards followed by parity shards. + * Each shard is a byte array, and they must all be the same + * size. + * @param firstByte The index of the first byte in each shard to check. + * @param byteCount The number of bytes to check in each shard. + * @param tempBuffer A temporary buffer (the same size as each of the + * shards) to use when computing parity. + */ + public bool isParityCorrect(byte[][] shards, int firstByte, int byteCount, byte[] tempBuffer) + { + // Check arguments. + checkBuffersAndSizes(shards, firstByte, byteCount); + if (tempBuffer.Length < firstByte + byteCount) + { + throw new Exception("tempBuffer is not big enough"); + } + + // Build the array of buffers being checked. + byte[][] toCheck = new byte [parityShardCount][]; + Array.Copy(shards, dataShardCount, toCheck, 0, parityShardCount); + + // Do the checking. + return codingLoop.checkSomeShards( + parityRows, + shards, dataShardCount, + toCheck, parityShardCount, + firstByte, byteCount, + tempBuffer); + } + + + /** + * Given a list of shards, some of which contain data, fills in the + * ones that don't have data. + * + * Quickly does nothing if all of the shards are present. + * + * If any shards are missing (based on the flags in shardsPresent), + * the data in those shards is recomputed and filled in. 
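+ *
+ * A minimal usage sketch (the shard counts, shardSize, shards and
+ * shardPresent below are hypothetical, chosen only for illustration):
+ *
+ *     var rs = ReedSolomon.create(4, 2);            // 4 data + 2 parity shards
+ *     rs.encodeParity(shards, 0, shardSize);        // fills shards[4] and shards[5]
+ *     shardPresent[1] = false;                      // pretend data shard 1 was lost
+ *     rs.decodeMissing(shards, shardPresent, 0, shardSize);  // rebuilds it in place
+ *
+ * The missing shard's buffer must still be allocated; its contents are overwritten.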
+ */ + public void decodeMissing(byte[][] shards, + bool[] shardPresent, + int offset, + int byteCount) + { + // Check arguments. + checkBuffersAndSizes(shards, offset, byteCount); + + // Quick check: are all of the shards present? If so, there's + // nothing to do. + int numberPresent = 0; + for (int i = 0; i < totalShardCount; i++) + { + if (shardPresent[i]) + { + numberPresent += 1; + } + } + + if (numberPresent == totalShardCount) + { + // Cool. All of the shards data data. We don't + // need to do anything. + return; + } + + // More complete sanity check + if (numberPresent < dataShardCount) + { + throw new Exception("Not enough shards present"); + } + + // Pull out the rows of the matrix that correspond to the + // shards that we have and build a square matrix. This + // matrix could be used to generate the shards that we have + // from the original data. + // + // Also, pull out an array holding just the shards that + // correspond to the rows of the submatrix. These shards + // will be the input to the decoding process that re-creates + // the missing data shards. + Matrix subMatrix = new Matrix(dataShardCount, dataShardCount); + byte[][] subShards = new byte [dataShardCount][]; + { + int subMatrixRow = 0; + for (int matrixRow = 0; matrixRow < totalShardCount && subMatrixRow < dataShardCount; matrixRow++) + { + if (shardPresent[matrixRow]) + { + for (int c = 0; c < dataShardCount; c++) + { + subMatrix.set(subMatrixRow, c, matrix.get(matrixRow, c)); + } + + subShards[subMatrixRow] = shards[matrixRow]; + subMatrixRow += 1; + } + } + } + + // Invert the matrix, so we can go from the encoded shards + // back to the original data. Then pull out the row that + // generates the shard that we want to decode. Note that + // since this matrix maps back to the orginal data, it can + // be used to create a data shard, but not a parity shard. + Matrix dataDecodeMatrix = subMatrix.invert(); + + // Re-create any data shards that were missing. + // + // The input to the coding is all of the shards we actually + // have, and the output is the missing data shards. The computation + // is done using the special decode matrix we just built. + byte[][] outputs = new byte [parityShardCount][]; + byte[][] matrixRows = new byte [parityShardCount][]; + int outputCount = 0; + for (int iShard = 0; iShard < dataShardCount; iShard++) + { + if (!shardPresent[iShard]) + { + outputs[outputCount] = shards[iShard]; + matrixRows[outputCount] = dataDecodeMatrix.getRow(iShard); + outputCount += 1; + } + } + + codingLoop.codeSomeShards( + matrixRows, + subShards, dataShardCount, + outputs, outputCount, + offset, byteCount); + + // Now that we have all of the data shards intact, we can + // compute any of the parity that is missing. + // + // The input to the coding is ALL of the data shards, including + // any that we just calculated. The output is whichever of the + // data shards were missing. + outputCount = 0; + for (int iShard = dataShardCount; iShard < totalShardCount; iShard++) + { + if (!shardPresent[iShard]) + { + outputs[outputCount] = shards[iShard]; + matrixRows[outputCount] = parityRows[iShard - dataShardCount]; + outputCount += 1; + } + } + + codingLoop.codeSomeShards( + matrixRows, + shards, dataShardCount, + outputs, outputCount, + offset, byteCount); + } + + /** + * Checks the consistency of arguments passed to public methods. 
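+ *
+ * Specifically: the shard array must hold exactly totalShardCount entries,
+ * the non-null shards are expected to share one length (at least one must be
+ * non-null), and offset and byteCount must be non-negative and fit inside
+ * that length.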
+ */ + private void checkBuffersAndSizes(byte[][] shards, int offset, int byteCount) + { + // The number of buffers should be equal to the number of + // data shards plus the number of parity shards. + if (shards.Length != totalShardCount) + { + throw new Exception("wrong number of shards: " + shards.Length); + } + + // All of the shard buffers should be the same length. + int shardLength = 0; + bool allShardIsEmpty = true; + //int shardLength = shards[0].length; + for (int i = 1; i < shards.Length; i++) + { + if (shards[i] == null) + continue; + allShardIsEmpty = false; + if (shardLength == 0) + { + shardLength = shards[i].Length; + continue; + } + + if (shards[i].Length != shardLength) + { + throw new Exception("Shards are different sizes"); + } + } + + if (allShardIsEmpty) + { + throw new Exception("Shards are empty"); + } + + // The offset and byteCount must be non-negative and fit in the buffers. + if (offset < 0) + { + throw new Exception("offset is negative: " + offset); + } + + if (byteCount < 0) + { + throw new Exception("byteCount is negative: " + byteCount); + } + + if (shardLength < offset + byteCount) + { + throw new Exception("buffers to small: " + byteCount + offset); + } + } + + /** + * Create the matrix to use for encoding, given the number of + * data shards and the number of total shards. + * + * The top square of the matrix is guaranteed to be an identity + * matrix, which means that the data shards are unchanged after + * encoding. + */ + private static Matrix buildMatrix(int dataShards, int totalShards) + { + // Start with a Vandermonde matrix. This matrix would work, + // in theory, but doesn't have the property that the data + // shards are unchanged after encoding. + Matrix matrix = vandermonde(totalShards, dataShards); + + // Multiple by the inverse of the top square of the matrix. + // This will make the top square be the identity matrix, but + // preserve the property that any square subset of rows is + // invertible. + Matrix top = matrix.submatrix(0, 0, dataShards, dataShards); + return matrix.times(top.invert()); + } + + /** + * Create a Vandermonde matrix, which is guaranteed to have the + * property that any subset of rows that forms a square matrix + * is invertible. + * + * @param rows Number of rows in the result. + * @param cols Number of columns in the result. + * @return A Matrix. 
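+ *
+ * Element (r, c) is r raised to the c-th power in GF(2^8), computed here
+ * as Galois.exp((byte) r, c); this is what gives the matrix the property
+ * described above.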
+ */ + private static Matrix vandermonde(int rows, int cols) + { + Matrix result = new Matrix(rows, cols); + try + { + for (int r = 0; r < rows; r++) + { + for (int c = 0; c < cols; c++) + { + result.set(r, c, Galois.exp((byte) r, c)); + } + } + } + catch (Exception e) + { + Console.WriteLine(e); + throw; + } + + return result; + } + + public int DataShardCount => dataShardCount; + + public int ParityShardCount => parityShardCount; + + public int TotalShardCount => totalShardCount; + + public byte[][] ParityRows => parityRows; + + public Matrix Matrix => matrix; + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomon.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomon.cs.meta new file mode 100644 index 0000000..c0b111f --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomon.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: fe9a8bba122aa4b039626cd7fdc0c027 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonBenchmark.cs b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonBenchmark.cs new file mode 100644 index 0000000..b3d259b --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonBenchmark.cs @@ -0,0 +1,286 @@ +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Text; + +namespace fec +{ + public class ReedSolomonBenchmark + { + public static readonly CodingLoop[] ALL_CODING_LOOPS = + new CodingLoop[] { + new InputOutputByteTableCodingLoop(), + new OutputInputByteTableCodingLoop(), + }; + + + private const int DATA_COUNT = 17; + + private const int PARITY_COUNT = 3; + + private const int TOTAL_COUNT = DATA_COUNT + PARITY_COUNT; + + private const int BUFFER_SIZE = 200 * 1000; + + private const int PROCESSOR_CACHE_SIZE = 10 * 1024 * 1024; + + private const int TWICE_PROCESSOR_CACHE_SIZE = 2 * PROCESSOR_CACHE_SIZE; + + private const int NUMBER_OF_BUFFER_SETS = TWICE_PROCESSOR_CACHE_SIZE / DATA_COUNT / BUFFER_SIZE + 1; + + + + private const long MEASUREMENT_DURATION = 2 * 1000; + + + + private static readonly Random random = new Random(); + + + + private int nextBuffer = 0; + + + public void run() + { + Console.WriteLine("preparing..."); + BufferSet[] bufferSets = new BufferSet [NUMBER_OF_BUFFER_SETS]; + for (int iBufferSet = 0; iBufferSet < NUMBER_OF_BUFFER_SETS; iBufferSet++) + { + bufferSets[iBufferSet] = new BufferSet(); + } + + byte[] tempBuffer = new byte [BUFFER_SIZE]; + + List summaryLines = new List(); + StringBuilder csv = new StringBuilder(); + csv.Append("Outer,Middle,Inner,Multiply,Encode,Check\n"); + foreach (var codingLoop in ALL_CODING_LOOPS) + { + Measurement encodeAverage = new Measurement(); + { + String testName = codingLoop.GetType().Name + " encodeParity"; + + + Console.WriteLine("\nTEST: " + testName); + ReedSolomon codec = new ReedSolomon(DATA_COUNT, PARITY_COUNT, codingLoop); + Console.WriteLine(" warm up..."); + doOneEncodeMeasurement(codec, bufferSets); + doOneEncodeMeasurement(codec, bufferSets); + Console.WriteLine(" testing..."); + for (int iMeasurement = 0; iMeasurement < 10; iMeasurement++) + { + encodeAverage.add(doOneEncodeMeasurement(codec, bufferSets)); + } + + Console.WriteLine("AVERAGE: {0}", encodeAverage); + summaryLines.Add(testName+" "+encodeAverage); + } + // The encoding test should have filled all of the buffers with + // correct parity, so we 
can benchmark parity checking. + Measurement checkAverage = new Measurement(); + { + String testName = codingLoop.GetType().Name + " isParityCorrect"; + Console.WriteLine("\nTEST: " + testName); + ReedSolomon codec = new ReedSolomon(DATA_COUNT, PARITY_COUNT, codingLoop); + Console.WriteLine(" warm up..."); + doOneEncodeMeasurement(codec, bufferSets); + doOneEncodeMeasurement(codec, bufferSets); + Console.WriteLine(" testing..."); + for (int iMeasurement = 0; iMeasurement < 10; iMeasurement++) + { + checkAverage.add(doOneCheckMeasurement(codec, bufferSets, tempBuffer)); + } + + Console.WriteLine("AVERAGE: {0}", checkAverage); + summaryLines.Add(testName+" "+checkAverage); + } + csv.Append(codingLoopNameToCsvPrefix(codingLoop.GetType().Name)); + csv.Append((int)encodeAverage.getRate()); + csv.Append(","); + csv.Append((int)checkAverage.getRate()); + csv.Append("\n"); + } + + Console.WriteLine("\n"); + Console.WriteLine(csv.ToString()); + + Console.WriteLine("\nSummary:\n"); + foreach (var line in summaryLines) + { Console.WriteLine(line); + } + } + + private Measurement doOneEncodeMeasurement(ReedSolomon codec, BufferSet[] bufferSets) + { + long passesCompleted = 0; + long bytesEncoded = 0; + long encodingTime = 0; + while (encodingTime < MEASUREMENT_DURATION) + { + BufferSet bufferSet = bufferSets[nextBuffer]; + nextBuffer = (nextBuffer + 1) % bufferSets.Length; + byte[][] shards = bufferSet.buffers; + Stopwatch stopwatch = new Stopwatch(); + stopwatch.Start(); + + codec.encodeParity(shards, 0, BUFFER_SIZE); + stopwatch.Stop(); + long stop = stopwatch.ElapsedMilliseconds; + TimeSpan timespan = stopwatch.Elapsed;  + encodingTime += (long)timespan.TotalMilliseconds; + bytesEncoded += BUFFER_SIZE * DATA_COUNT; + passesCompleted += 1; + } + + double seconds = ((double) encodingTime) / 1000.0; + double megabytes = ((double) bytesEncoded) / 1000000.0; + Measurement result = new Measurement(megabytes, seconds); + Console.WriteLine(" {0} passes, {1}", passesCompleted, result.ToString()); + return result; + } + + private Measurement doOneCheckMeasurement(ReedSolomon codec, BufferSet[] bufferSets, byte[] tempBuffer) + { + long passesCompleted = 0; + long bytesChecked = 0; + long checkingTime = 0; + while (checkingTime < MEASUREMENT_DURATION) + { + BufferSet bufferSet = bufferSets[nextBuffer]; + nextBuffer = (nextBuffer + 1) % bufferSets.Length; + byte[][] shards = bufferSet.buffers; + + Stopwatch stopwatch = new Stopwatch(); + stopwatch.Start(); + + if (!codec.isParityCorrect(shards, 0, BUFFER_SIZE, tempBuffer)) + { + // if the parity is not correct, it will throw off the + // benchmarking because it may return early. + throw new Exception("parity not correct"); + } + + stopwatch.Stop(); //  停止监视 + TimeSpan timespan = stopwatch.Elapsed;  + checkingTime += (long)timespan.TotalMilliseconds; + bytesChecked += BUFFER_SIZE * DATA_COUNT; + passesCompleted += 1; + } + + double seconds = ((double) checkingTime) / 1000.0; + double megabytes = ((double) bytesChecked) / 1000000.0; + Measurement result = new Measurement(megabytes, seconds); + Console.WriteLine(" {0} passes, {1}", passesCompleted, result); + return result; + } + + /** + * Converts a name like "OutputByteInputTableCodingLoop" to + * "output,byte,input,table,". 
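+ *
+ * Only the first four camel-case words of the class name are used, so for
+ * "InputOutputByteTableCodingLoop" the prefix would be "Input,Output,Byte,Table,"
+ * (splitCamelCase keeps the original casing of each word).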
+ */ + private static string codingLoopNameToCsvPrefix(string className) + { + List names = splitCamelCase(className); + return + names[0] + "," + + names[1] + "," + + names[2] + "," + + names[3] + ","; + } + + /** + * Converts a name like "OutputByteInputTableCodingLoop" to a List of + * words: { "output", "byte", "input", "table", "coding", "loop" } + */ + private static List splitCamelCase(string className) + { + string remaining = className; + List result = new List(); + while (remaining.Length!=0) + { + bool found = false; + for (int i = 1; i < remaining.Length; i++) + { + if (remaining[i] >= 'A' && remaining[i] <= 'Z') + { + result.Add(remaining.Substring(0, i)); + remaining = remaining.Substring(i); + found = true; + break; + } + } + + if (!found) + { + result.Add(remaining); + remaining = ""; + } + } + + return result; + } + + + private class BufferSet + { + public readonly byte[][] buffers; + + public readonly byte[] bigBuffer; + + public BufferSet() + { + buffers = new byte [TOTAL_COUNT][]; + for (int iBuffer = 0; iBuffer < TOTAL_COUNT; iBuffer++) + { + byte[] buffer = new byte[BUFFER_SIZE]; + buffers[iBuffer] = buffer; + for (int iByte = 0; iByte < BUFFER_SIZE; iByte++) + { + buffer[iByte] = (byte) random.Next(256); + } + } + + bigBuffer = new byte [TOTAL_COUNT * BUFFER_SIZE]; + for (int i = 0; i < TOTAL_COUNT * BUFFER_SIZE; i++) + { + bigBuffer[i] = (byte) random.Next(256); + } + } + } + + private class Measurement + { + private double megabytes; + private double seconds; + + public Measurement() + { + this.megabytes = 0.0; + this.seconds = 0.0; + } + + public Measurement(double megabytes, double seconds) + { + this.megabytes = megabytes; + this.seconds = seconds; + } + + public void add(Measurement other) + { + megabytes += other.megabytes; + seconds += other.seconds; + } + + public double getRate() + { + return megabytes / seconds; + } + + public override string ToString() + { + return string.Format((int)getRate()+"MB/s" ); + } + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonBenchmark.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonBenchmark.cs.meta new file mode 100644 index 0000000..9f32132 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonBenchmark.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: d3e68ffed3ac84707ba475dc4021f1f7 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonTest.cs b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonTest.cs new file mode 100644 index 0000000..19903dd --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonTest.cs @@ -0,0 +1,202 @@ +using System; +using System.Collections.Generic; + +namespace fec +{ + public class ReedSolomonTest + { + + + /** + * Try encoding and decoding with a lot of shards. 
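+ *
+ * Each of the 1000 rounds below builds 64 random data shards of 200 bytes,
+ * encodes 64 parity shards, and then decodes with various subsets of shards
+ * cleared and flagged as missing, verifying that every byte of the original
+ * data is reconstructed.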
+ */ + public void testBigEncodeDecode() { + Random random = new Random(0); + for (int k = 0; k < 1000; k++) + { + int dataCount = 64; + int parityCount = 64; + int shardSize = 200; + byte [] [] dataShards = new byte [dataCount] []; + for (var j = 0; j < dataShards.Length; j++) + { + var shard = dataShards[j] = new byte[shardSize]; + for (int i = 0; i < shard.Length; i++) { + shard[i] = (byte) random.Next(256); + } + } + runEncodeDecode(dataCount, parityCount, dataShards); + } + + Console.WriteLine("测试完成"); + } + + /** + * Encodes a set of data shards, and then tries decoding + * using all possible subsets of the encoded shards. + * + * Uses 5+5 coding, so there must be 5 input data shards. + */ + private void runEncodeDecode(int dataCount, int parityCount, byte[][] dataShards) { + + int totalCount = dataCount + parityCount; + int shardLength = dataShards[0].Length; + + // Make the list of data and parity shards. +// assertEquals(dataCount, dataShards.length); + int dataLength = dataShards[0].Length; + byte [] [] allShards = new byte [totalCount] []; + for (int i = 0; i < dataCount; i++) { + byte[] temp = new byte[dataLength]; + Array.Copy(dataShards[i],0,temp,0,dataLength); + allShards[i] = temp; + } + for (int i = dataCount; i < totalCount; i++) { + allShards[i] = new byte [dataLength]; + } + + // Encode. + ReedSolomon codec = ReedSolomon.create(dataCount, parityCount); + codec.encodeParity(allShards, 0, dataLength); + + // Make a copy to decode with. + byte [] [] testShards = new byte [totalCount] []; + bool [] shardPresent = new bool [totalCount]; + for (int i = 0; i < totalCount; i++) { + byte[] temp = new byte[shardLength]; + Array.Copy(allShards[i],0,temp,0,shardLength); + testShards[i] = temp; + shardPresent[i] = true; + } + + // Decode with 0, 1, ..., 5 shards missing. + for (int numberMissing = 0; numberMissing < parityCount + 1; numberMissing++) { + tryAllSubsetsMissing(codec, allShards, testShards, shardPresent, numberMissing); + } + } + + private void tryAllSubsetsMissing(ReedSolomon codec, + byte [] [] allShards, byte [] [] testShards, + bool [] shardPresent, int numberMissing) { + int shardLength = allShards[0].Length; + List subsets = allSubsets(numberMissing, 0, 10); + foreach (var subset in subsets) { + // Get rid of the shards specified by this subset. + foreach (var missingShard in subset) + { + clearBytes(testShards[missingShard]); + shardPresent[missingShard] = false; + } + + // Reconstruct the missing shards + codec.decodeMissing(testShards, shardPresent, 0, shardLength); + + // Check the results. After checking, the contents of testShards + // is ready for the next test, the next time through the loop. 
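+ // (checkShards compares every shard against the original byte for
+ // byte and throws if any of them differ.)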
+ checkShards(allShards, testShards); + + // Put the "present" flags back + for (int i = 0; i < codec.getTotalShardCount(); i++) { + shardPresent[i] = true; + } + } + } + + + + + + private void assertTrue(bool isParityCorrect) + { + throw new NotImplementedException(); + } + + private void assertFalse(bool isParityCorrect) + { + throw new NotImplementedException(); + } + + private void clearBytes(byte [] data) { + for (int i = 0; i < data.Length; i++) { + data[i] = 0; + } + } + + private void checkShards(byte[][] expectedShards, byte[][] actualShards) { + assertEquals(expectedShards.Length, actualShards.Length); + for (int i = 0; i < expectedShards.Length; i++) { + assertArrayEquals(expectedShards[i], actualShards[i]); + } + } + + private void assertArrayEquals(byte[] expectedShard, byte[] actualShard) + { + + var len1 = expectedShard.Length; + var len2 = actualShard.Length; + if (len1 != len2) + { + throw new NotImplementedException(); + } + for (var i = 0; i < len1; i++) + { + if (expectedShard[i] != actualShard[i]) + { + throw new NotImplementedException(); + } + } + } + + private void assertEquals(int expectedShardsLength, int actualShardsLength) + { + if (expectedShardsLength != actualShardsLength) + { + throw new NotImplementedException(); + } + } + + + /** + * Returns a list of arrays with all possible sets of + * unique values where (min <= n < max). + * + * This is NOT EFFICIENT, because it allocates lots of + * temporary arrays, but it's OK for these tests. + * + * To avoid duplicates that are in a different order, + * each subset is generated with elements in increasing + * order. + * + * Given (n=2, min=1, max=4), returns: + * [1, 2] + * [1, 3] + * [1, 4] + * [2, 3] + * [2, 4] + * [3, 4] + */ + private List allSubsets(int n, int min, int max) { + List result = new List(); + if (n == 0) { + result.Add(new int [0]); + } + else { + for (int i = min; i < max - n; i++) { + int [] prefix = { i }; + foreach (var suffix in allSubsets(n - 1, i + 1, max)) + { + result.Add(appendIntArrays(prefix, suffix)); + } + } + } + return result; + } + + private int [] appendIntArrays(int [] a, int [] b) { + int [] result = new int[a.Length + b.Length]; + Array.Copy(a, 0, result, 0, a.Length); + Array.Copy(b, 0, result, a.Length, b.Length); + return result; + } + } +} \ No newline at end of file diff --git a/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonTest.cs.meta b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonTest.cs.meta new file mode 100644 index 0000000..ceb0045 --- /dev/null +++ b/Runtime/csharp-kcp/reedsolomon_csharp/ReedSolomonTest.cs.meta @@ -0,0 +1,11 @@ +fileFormatVersion: 2 +guid: cc802d487f5a44b30bef46ff82b32674 +MonoImporter: + externalObjects: {} + serializedVersion: 2 + defaultReferences: [] + executionOrder: 0 + icon: {instanceID: 0} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Assets/GuruKCP/package.json b/package.json similarity index 100% rename from Assets/GuruKCP/package.json rename to package.json diff --git a/Assets/GuruKCP/package.json.meta b/package.json.meta similarity index 100% rename from Assets/GuruKCP/package.json.meta rename to package.json.meta