I've written a JSON byte serializer using plain byte arrays and it works fine. For comparison, I tried to implement a PipeWriter-based version using the little information I could find on System.IO.Pipelines.
It runs far too slowly, which may be down to my limited experience with it, or it may simply not fit this use case. I'd appreciate some help understanding why it's so slow, or, if it shouldn't be, where I've gone wrong.
I'm targeting NetStandard 2.0 and NetCore 2.1, so I don't have access to the Encoding.ASCII span overloads.
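For context, here's a minimal sketch of the length-prefixed write shape against the PipeWriter API (GetSpan/Advance/FlushAsync); without the span-based Encoding overloads it still has to go through an intermediate byte[]:

    using System;
    using System.IO.Pipelines;
    using System.Text;
    using System.Threading.Tasks;

    internal static class PipeWriterSketch
    {
        //sketch only: write a 4-byte length prefix plus an ASCII payload straight into
        //the PipeWriter's buffer, then flush so the reader side can consume it
        public static ValueTask<FlushResult> WriteLengthPrefixed(PipeWriter writer, string json)
        {
            //netstandard2.0-friendly call; the span overload would avoid this intermediate array
            byte[] payload = Encoding.ASCII.GetBytes(json);
            var span = writer.GetSpan(sizeof(int) + payload.Length);
            BitConverter.GetBytes(payload.Length).AsSpan().CopyTo(span);
            payload.AsSpan().CopyTo(span.Slice(sizeof(int)));
            writer.Advance(sizeof(int) + payload.Length);
            return writer.FlushAsync();
        }
    }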
Sample repo at https://github.com/benardmens/PipelinePerf
Or the one-pager of code below:
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Serialization;
using NodaTime;
using NodaTime.Serialization.JsonNet;
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.IO;
using System.IO.Compression;
using System.IO.Pipelines;
using System.Linq;
using System.Net;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
namespace OnePage
{
#region support
internal static class ConverterCache
{
#region variables
#endregion
#region properties
internal static Dictionary<Type, JsonConverter> Converters { get; }
internal static List<JsonConverter> CreationConverters { get; }
#endregion
#region construction
static ConverterCache()
{
var types = Assembly.GetAssembly(typeof(ConverterCache)).GetTypes();
Converters = new Dictionary<Type, JsonConverter>();
var converterTypes = types.
Where(x => !x.IsAbstract && !x.IsInterface && typeof(IConverterDescriptor).IsAssignableFrom(x)).
ToArray();
foreach (var converterDescriptor in converterTypes.Select(x => Activator.CreateInstance(x) as IConverterDescriptor).Where(x => x != null))
{
Converters[converterDescriptor.TypeToConvert] = converterDescriptor as JsonConverter;
}
CreationConverters = types.
Where(x => typeof(JsonConverter).
IsAssignableFrom(x)).
Where(x => x.BaseType != null && x.BaseType.IsGenericType && x.BaseType.GetGenericTypeDefinition() == typeof(CustomCreationConverter<>)).
Select(x => Activator.CreateInstance(x) as JsonConverter).
ToList();
}
#endregion
#region methods
#endregion
#region event methods
#endregion
#region overrides
#endregion
}
public interface IConverterDescriptor
{
#region variables
#endregion
#region properties
Type TypeToConvert { get; }
#endregion
#region construction
#endregion
#region methods
#endregion
#region event methods
#endregion
#region overrides
#endregion
}
public class IpCreationConverter : CustomCreationConverter<IPAddress>
{
#region variables
#endregion
#region properties
#endregion
#region construction
#endregion
#region methods
#endregion
#region event methods
#endregion
#region overrides
public override bool CanConvert(Type objectType)
{
return objectType == typeof(IPAddress);
}
public override IPAddress Create(Type objectType)
{
return IPAddress.Parse("0.0.0.0");
}
#endregion
}
public class IpEndPointCreationConverter : CustomCreationConverter<IPEndPoint>
{
#region variables
#endregion
#region properties
#endregion
#region construction
#endregion
#region methods
#endregion
#region event methods
#endregion
#region overrides
public override bool CanConvert(Type objectType)
{
return objectType == typeof(IPEndPoint);
}
public override IPEndPoint Create(Type objectType)
{
return new IPEndPoint(IPAddress.Parse("0.0.0.0"), 0);
}
#endregion
}
public interface IRecordDataSerializer
{
Task<bool> Serialize(object record, PipeWriter writer);
Task<bool> SerializeCompressed(object record, PipeWriter writer);
}
public class ObjectToJsonRecordSerializer : IRecordDataSerializer
{
public async Task<bool> Serialize(object record, PipeWriter writer)
{
try
{
var bytes = Encoding.ASCII.GetBytes(Serializer.Serialize(record, Formatting.None));
var sizeBytes = BitConverter.GetBytes(bytes.Length);
await writer.WriteAndAdvance(sizeBytes);
await writer.WriteAndAdvance(bytes);
}
catch (Exception)
{
return false;
}
return true;
}
private readonly ArrayPool<byte> _arrayPool = ArrayPool<byte>.Shared;
private byte[] _rented;
public async Task<bool> SerializeCompressed(object record, PipeWriter writer)
{
try
{
var data = Encoding.ASCII.GetBytes(Serializer.Serialize(record, Formatting.None));
_rented = _arrayPool.Rent(data.Length * 2);
using (var compressedStream = new MemoryStream(_rented))
{
//the GZipStream must be closed before the length is read, otherwise the gzip footer (and any buffered data) never reaches the buffer
using (var zipStream = new GZipStream(compressedStream, CompressionMode.Compress, leaveOpen: true))
{
zipStream.Write(data, 0, data.Length);
}
var sizeBytes = BitConverter.GetBytes((int)compressedStream.Position);
await writer.WriteAndAdvance(sizeBytes);
//get the slice of the stream actually written to and persist
Memory<byte> memSlice = _rented;
await writer.WriteAndAdvance(memSlice.Slice(0, (int)compressedStream.Position));
}
_arrayPool.Return(_rented);
}
catch (Exception)
{
if (_rented != null)
_arrayPool.Return(_rented);
return false;
}
return true;
}
}
public class Record
{
public Record(Instant recordTime, IEnumerable<RecordEntry> entries)
{
Entries = entries.ToImmutableList();
RecordTime = recordTime;
}
public Record()
{
}
public ImmutableList<RecordEntry> Entries { get; set; }
public Instant RecordTime { get; set; }
}
public class RecordEntry
{
public RecordEntry(object value)
{
Value = value;
}
public RecordEntry()
{
}
public object Value { get; set; }
}
public class Resolver : DefaultContractResolver
{
#region variables
#endregion
#region properties
#endregion
#region construction
#endregion
#region methods
protected override JsonContract CreateContract(Type objectType)
{
var contract = base.CreateContract(objectType);
if (ConverterCache.Converters.TryGetValue(objectType, out JsonConverter converter))
contract.Converter = converter;
return contract;
}
protected override IList<JsonProperty> CreateProperties(Type type, MemberSerialization memberSerialization)
{
//return base.CreateProperties(type, memberSerialization).ToList();
return base.CreateProperties(type, memberSerialization).Where(p => p.Writable).ToList();
}
#endregion
#region event methods
#endregion
#region overrides
#endregion
}
public static class Serializer
{
#region variables
private static readonly JsonSerializerSettings Settings = new JsonSerializerSettings
{
TypeNameHandling = TypeNameHandling.Auto,
ObjectCreationHandling = ObjectCreationHandling.Replace,
ContractResolver = new Resolver(),
Converters = ConverterCache.CreationConverters,
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
};
#endregion
#region properties
#endregion
#region construction
static Serializer()
{
Settings.ConfigureForNodaTime(DateTimeZoneProviders.Tzdb);
}
#endregion
#region methods
/// <summary>
/// Deserializes a json string into the specified type
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="content"></param>
/// <returns></returns>
public static T Deserialize<T>(string content)
{
return JsonConvert.DeserializeObject<T>(content, Settings);
}
/// <summary>
/// Serializes object into a json string
/// </summary>
/// <param name="subject"></param>
/// <param name="formatting"></param>
/// <returns></returns>
public static string Serialize(object subject, Formatting formatting = Formatting.Indented)
{
return JsonConvert.SerializeObject(subject, formatting, Settings);
}
#endregion
#region event methods
#endregion
#region overrides
#endregion
}
#endregion
public static class PipeWriterExtensions
{
public static async Task WriteAndAdvance(this PipeWriter writer, byte[] data)
{
await writer.WriteAsync(data);
}
public static async Task WriteAndAdvance(this PipeWriter writer, Memory<byte> data)
{
await writer.WriteAsync(data);
}
}
public class AsyncToBinaryConverter
{
private readonly string[] _headers;
public event Action<PipeReader> OnBytes;
private readonly Pipe _pipe = new Pipe();
private bool _infoWritten;
private readonly IRecordDataSerializer _serializer;
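//reusable single-element array so WriteAsync(object) doesn't allocate per call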
private readonly object[] _array = new object[1];
private PipeWriter _writer;
public AsyncToBinaryConverter(IEnumerable<string> headers, IRecordDataSerializer serializer)
{
_serializer = serializer;
_headers = headers.ToArray();
}
public Task<bool> WriteAsync(object subject)
{
_array[0] = subject;
return Write(_array);
}
public async Task<bool> Write<TSubjectType>(IEnumerable<TSubjectType> subjects)
{
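//complete both ends and reset so the single Pipe instance can be reused across calls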
_pipe.Writer.Complete();
_pipe.Reader.Complete();
_pipe.Reset();
//if any fails we cannot output
_writer = _pipe.Writer;
foreach (var subject in subjects)
{
var success = await Convert(subject);
if (!success) return false;
}
await _writer.FlushAsync();
//process and output the buffer
OnBytes?.Invoke(_pipe.Reader);
return true;
}
private async Task<bool> Convert(object subject)
{
try
{
//only write the type and headers once
if (!_infoWritten)
{
var result = await AddInfoBytes(subject.GetType(), _writer);
if (!result)
{
return false;
}
_infoWritten = true;
}
//serialize and compress the sample
await _serializer.SerializeCompressed(subject, _writer);
return true;
}
catch
{
return false;
}
}
private async Task<bool> AddInfoBytes(Type subjectType, PipeWriter writer)
{
try
{
//get the string that's useable by Type.GetType
var typeString = $"{subjectType.AssemblyQualifiedName}";
var bytes = Encoding.ASCII.GetBytes(typeString);
var contentLength = bytes.Length;
var sizeBytes = BitConverter.GetBytes(contentLength);
//write the size...
await writer.WriteAndAdvance(sizeBytes);
//and the contents
await writer.WriteAndAdvance(bytes);
//write the header bytes
await _serializer.Serialize(_headers, writer);
}
catch (Exception)
{
return false;
}
return true;
}
}
}
And the test:
[TestMethod]
public void Poc()
{
var random = new Random();
var writer = new AsyncToBinaryConverter(new[] { "", "as", "asdsa" }, new ObjectToJsonRecordSerializer());
// var reader = new FromBinaryConverter();
var count = 0;
writer.OnBytes += x =>
{
var m = x;
};
// reader.OnObject += x => count++;
var record = new Record(SystemClock.Instance.GetCurrentInstant(), Enumerable.Range(1, 50).Select(x => new RecordEntry(random.NextDouble())));
var records = Enumerable.Range(1, 10000).Select(x => record);
var sw = Stopwatch.StartNew();
writer.Write(records).Wait();
sw.Stop();
Console.WriteLine(sw.ElapsedMilliseconds);
//Assert.AreEqual(10000, count);
}
Thanks for reading.